phase1: remove unused 'other_builds'
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record our PID once so init/monitoring scripts can tell the master is up.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Load the INI configuration; BUILDMASTER_CONFIG overrides the default path.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
BuildmasterConfig = {}
c = BuildmasterConfig
57
####### PROJECT IDENTITY

# 'title' appears at the top of the web UI (linked to 'titleURL');
# 'buildbotURL' is the externally visible URL of this master's web server.
c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")
c['buildbotURL'] = ini.get("phase1", "buildbot_url")
74
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9989

if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")

c['workers'] = []
NetLocks = dict()

# One "worker <id>" INI section per worker; only phase-1 workers are taken.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                if max_builds == 1:
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUGFIX: read the 'ul_lock' option here. This previously
                # re-read 'dl_lock', so a worker with only 'ul_lock' set
                # crashed with NoOptionError and a worker with both options
                # registered the wrong upload lock.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))

# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
134
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    builder_id = yield bldr.getBuilderId()

    # Newest completed, non-skipped build request for this builder.
    requests = yield bldr.master.data.get(
        ('builders', builder_id, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not requests:
        return

    newest = requests[0]['complete_at']

    # The most recently started build may have completed even later.
    builds = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [builder_id]),
        ],
        order=['-started_at'], limit=1)

    if builds and builds[0]:
        last_complete_at = builds[0]['complete_at']
        if last_complete_at and last_complete_at > newest:
            return last_complete_at

    return newest
172
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    Builders that have never completed sort first (oldest possible time);
    builders that are currently building sort last.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # never completed -> treat as oldest so it gets priority
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # currently busy -> treat as newest so it goes to the back
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # FIX: renamed local from 'results' to avoid shadowing the module-level
    # 'buildbot.process.results' import used by getNewestCompleteTime().
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in infos]

c['prioritizeBuilders'] = prioritizeBuilders
211
####### CHANGESOURCES

# Local working copy / helper script locations.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
tree_expire = 0
cc_version = None

cc_command = "gcc"
cxx_command = "g++"

config_seed = ""

git_ssh = False
git_ssh_key = None

if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

# cc_version is either None or a two-element [op, version] list for findbin.pl;
# a single value means an exact ("eq") match.
if ini.has_option("phase1", "cc_version"):
    cc_version = ini.get("phase1", "cc_version").split()
    if len(cc_version) == 1:
        cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # SSH access is useless without a key; force it off.
    git_ssh = False

if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# rsync-daemon style destinations (module::path or rsync:// URL) additionally
# support a connect timeout. (idiom fix: startswith instead of find() == 0)
if rsync_bin_url.find("::") > 0 or rsync_bin_url.startswith("rsync://"):
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

    if rsync_src_url.find("::") > 0 or rsync_src_url.startswith("rsync://"):
        rsync_src_defopts += ["--contimeout=20"]

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

enable_kmod_archive = False
embed_kmod_repository = False

if ini.has_option("phase1", "kmod_archive"):
    enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")

if ini.has_option("phase1", "kmod_repository"):
    embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
287
288
# find targets
targets = [ ]

# Keep a shallow clone of the source tree around; update it on each restart.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# First whitespace-separated field of each output line is the target name.
for line in findtargets.stdout:
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])

# FIX: reap the child process (the previous readline loop never wait()ed,
# leaving a zombie for the master's lifetime).
findtargets.wait()
307
308
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

# Poll the repository branch every 5 minutes for new commits.
c['change_source'] = [
    GitPoller(
        repo_url,
        workdir=work_dir + '/work.git',
        branch=repo_branch,
        pollinterval=300,
    )
]
317
318 ####### SCHEDULERS
319
320 # Configure the Schedulers, which decide how to react to incoming changes. In this
321 # case, just kick off a 'basebuild' build
322
class TagChoiceParameter(BaseParameter):
    """Force-scheduler list parameter offering the release tags of the
    current stable branch (e.g. v18.06.*) plus an empty default entry.
    On a 'master' branch the list is empty apart from the default.
    """

    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            for line in findtags.stdout:
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                # keep only tags belonging to this branch's base version
                # (idiom fix: startswith instead of find() == 0)
                if tagver and tagver[1].startswith(basever[1]):
                    taglist.append(tagver[1])

            # FIX: reap the child to avoid accumulating zombies on every
            # force-build page load.
            findtags.wait()

        # append '-z' to final releases so they sort after their -rc tags
        taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
364
# Automatic scheduler: build every target when the tracked branch changes;
# force scheduler: manual builds of one target, optionally from a release tag.
c['schedulers'] = [
    SingleBranchScheduler(
        name = "all",
        change_filter = filter.ChangeFilter(branch=repo_branch),
        treeStableTimer = 60,
        builderNames = targets),

    ForceScheduler(
        name = "force",
        buttonName = "Force builds",
        label = "Force build details",
        builderNames = [ "00_force_build" ],

        codebases = [
            util.CodebaseParameter(
                "",
                label = "Repository",
                branch = util.FixedParameter(name = "branch", default = ""),
                revision = util.FixedParameter(name = "revision", default = ""),
                repository = util.FixedParameter(name = "repository", default = ""),
                project = util.FixedParameter(name = "project", default = "")
            )
        ],

        reason = util.StringParameter(
            name = "reason",
            label = "Reason",
            default = "Trigger build",
            required = True,
            size = 80
        ),

        properties = [
            util.NestedParameter(
                name="options",
                label="Build Options",
                layout="vertical",
                fields=[
                    util.ChoiceStringParameter(
                        name = "target",
                        label = "Build target",
                        default = "all",
                        choices = [ "all" ] + targets
                    ),
                    TagChoiceParameter(
                        name = "tag",
                        label = "Build tag",
                        default = ""
                    )
                ]
            )
        ]
    )
]
418
419 ####### BUILDERS
420
421 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
422 # what steps, and which workers can execute them. Note that any particular build will
423 # only take place on one worker.
424
# Mapping of user-requestable clean levels (matched against the 'clean'
# build property) to the corresponding make target, from least to most
# thorough.
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]
432
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate matching the 'clean' property against *pattern*."""
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        return bool(val and re.match(pattern, val))

    return CheckCleanProperty
442
def IsSharedWorkdir(step):
    """doStepIf: whether this worker runs in a single shared work directory."""
    return True if step.getProperty("shared_wd") else False
445
def IsCleanupRequested(step):
    """doStepIf: run cleanup.sh, unless the worker uses a shared workdir."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))
454
def IsExpireRequested(step):
    """doStepIf: expire the tree only when neither shared-workdir nor cleanup applies."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
460
def IsGitFreshRequested(step):
    """doStepIf: use git's 'fresh' checkout method when cleanup was requested."""
    return True if step.getProperty("do_cleanup") else False
467
def IsGitCleanRequested(step):
    """doStepIf: complement of IsGitFreshRequested."""
    return False if IsGitFreshRequested(step) else True
470
def IsTaggingRequested(step):
    """doStepIf: the 'tag' property holds a valid release tag (x.y.z or x.y.z-rcN)."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
477
def IsNoTaggingRequested(step):
    """doStepIf: complement of IsTaggingRequested."""
    return False if IsTaggingRequested(step) else True
480
def IsNoMasterBuild(step):
    """doStepIf: true on stable branches, false when tracking 'master'."""
    return not (repo_branch == "master")
483
def GetBaseVersion():
    """Return 'x.y' from a stable branch name like 'openwrt-18.06', else 'master'."""
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return repo_branch.split('-')[1]
    return "master"
489
@properties.renderer
def GetVersionPrefix(props):
    """Renderer: upload directory prefix — '<tag>/', '<x.y>-SNAPSHOT/' or ''."""
    basever = GetBaseVersion()
    tag = props["tag"] if props.hasProperty("tag") else None
    if tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag):
        return "%s/" % tag
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
499
@properties.renderer
def GetNumJobs(props):
    """Renderer: make -j value — worker CPU count divided by concurrent builds."""
    if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
        return "1"
    return str(int(int(props["nproc"]) / props["max_builds"]))
506
@properties.renderer
def GetCC(props):
    """Renderer: discovered C compiler command, defaulting to 'gcc'."""
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"
513
@properties.renderer
def GetCXX(props):
    """Renderer: discovered C++ compiler command, defaulting to 'g++'."""
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"
520
@properties.renderer
def GetCwd(props):
    """Renderer: best-known working directory for this build ('builddir',
    then 'workdir', falling back to '/')."""
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
529
@properties.renderer
def GetCCache(props):
    """Renderer: detected ccache command, or '' when ccache is unavailable."""
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
536
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer tagged (release) build requests over ordinary ones."""
    for req in requests:
        if req.properties and req.properties.hasProperty("tag"):
            return req

    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
545
def MakeEnv(overrides=None, tryccache=False):
    """Build the common make environment.

    CCC/CCXX always carry the raw compiler commands; CC/CXX point either at
    the ccache wrapper scripts (tryccache=True) or directly at the compilers.
    *overrides* entries are merged in last and win.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env.update({
            'CC': Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd),
            'CXX': Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd),
            'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
        })
    else:
        env.update({
            'CC': env['CCC'],
            'CXX': env['CCXX'],
            'CCACHE': '',
        })
    if overrides is not None:
        env.update(overrides)
    return env
562
@properties.renderer
def NetLockDl(props):
    """Renderer: exclusive access to this worker's download lock, if configured."""
    lock = NetLocks[props["dl_lock"]] if props.hasProperty("dl_lock") else None
    if lock is not None:
        return [lock.access('exclusive')]
    return []
572
@properties.renderer
def NetLockUl(props):
    """Renderer: exclusive access to this worker's upload lock, if configured."""
    lock = NetLocks[props["ul_lock"]] if props.hasProperty("ul_lock") else None
    if lock is not None:
        return [lock.access('exclusive')]
    return []
582
@util.renderer
def TagPropertyValue(props):
    """Renderer: the 'tag' entry of the force-build 'options' dict, or None."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        if type(options) is dict:
            return options.get("tag")
    return None
590
def IsTargetSelected(target):
    """Return a doStepIf predicate: run only when the forced 'target' option
    is 'all' or matches *target* (missing/odd options default to running)."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected = options.get("target", "all")
                if selected not in ("all", target):
                    return False
        except KeyError:
            pass

        return True

    return CheckTargetProperty
605
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key file contents from a base64 secret key.

    The public material is assembled from slices of the decoded secret key
    (bytes 0..2, 32..40 and 72..) and re-encoded as base64; the comment has
    'secret key' rewritten to 'public key'. Returns None when *seckey* is
    not valid base64 (or not a string at all).
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # keep the original best-effort behavior, but don't swallow
        # KeyboardInterrupt/SystemExit like the previous bare except did
        return None

    # FIX: b64encode() returns bytes on Python 3; without .decode() the
    # generated key file contained a literal "b'...'" wrapper, producing an
    # invalid public key.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
                           base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
614
615
c['builders'] = []

# Serializes dl/ population across concurrent builds on the same worker.
dlLock = locks.WorkerLock("worker_dl")

# Shell helper prepended to conditional commands (see IfBuiltinShellCommand).
# Returns 0 (i.e. "run the command") when any changed file lies outside
# package/, or when one of the changed packages is built into the firmware
# (its symbol is =y in .config according to tmp/.packagedeps); returns 1
# otherwise. The re.sub collapses all whitespace into single spaces so the
# snippet fits on one shell -c line.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
checkBuiltin() {
	local symbol op path file;
	for file in $CHANGED_FILES; do
		case "$file" in
			package/*/*) : ;;
			*) return 0 ;;
		esac;
	done;
	while read symbol op path; do
		case "$symbol" in package-*)
			symbol="${symbol##*(}";
			symbol="${symbol%)}";
			for file in $CHANGED_FILES; do
				case "$file" in "package/$path/"*)
					grep -qsx "$symbol=y" .config && return 0
				;; esac;
			done;
		esac;
	done < tmp/.packagedeps;
	return 1;
}
""").strip()
643
644
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand variant that only runs when the changed packages are
    built into the firmware: it prepends the checkBuiltin shell helper to
    the command and exports the changed-file list via $CHANGED_FILES.
    """

    def _quote(self, arg):
        # single-quote any argument containing shell-special characters
        # (renamed from 'str' to avoid shadowing the builtin)
        if re.search("[^a-zA-Z0-9/_.-]", arg):
            return "'%s'" %(re.sub("'", "'\"'\"'", arg))
        return arg

    def setCommand(self, command):
        # FIX: 'unicode' does not exist on Python 3 and raised NameError
        # here; plain str covers all string commands.
        if not isinstance(command, str):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        # collect every file touched by the changes that triggered this build
        workerEnv = self.workerEnvironment
        if workerEnv is None:
            workerEnv = { }
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullSlaveEnv = workerEnv.copy()
        fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullSlaveEnv
672
# Every configured worker may run the dummy force-build builder.
workerNames = [worker.workername for worker in c['workers']]

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
684
# One builder per target; 'target' is of the form "<target>/<subtarget>",
# so ts[0]/ts[1] below are the two halves.
for target in targets:
    ts = target.split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))
705
    # find gcc and g++ compilers; findbin.pl selects a binary matching the
    # optional cc_version constraint ([op, version], empty strings = any)
    factory.addStep(FileDownload(
        name = "dlfindbinpl",
        mastersrc = scripts_dir + '/findbin.pl',
        workerdest = "../findbin.pl",
        mode = 0o755))

    factory.addStep(SetPropertyFromCommand(
        name = "gcc",
        property = "cc_command",
        description = "Finding gcc command",
        command = [
            "../findbin.pl", "gcc",
            cc_version[0] if cc_version is not None else '',
            cc_version[1] if cc_version is not None else ''
        ],
        haltOnFailure = True))

    factory.addStep(SetPropertyFromCommand(
        name = "g++",
        property = "cxx_command",
        description = "Finding g++ command",
        command = [
            "../findbin.pl", "g++",
            cc_version[0] if cc_version is not None else '',
            cc_version[1] if cc_version is not None else ''
        ],
        haltOnFailure = True))

    # see if ccache is available; failure is tolerated (GetCCache then
    # renders to '' and the plain compilers are used)
    factory.addStep(SetPropertyFromCommand(
        property = "ccache_command",
        command = ["which", "ccache"],
        description = "Testing for ccache command",
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    ))
744
    # expire tree if needed (expire.sh decides, based on tree_expire seconds;
    # mutually exclusive with the cleanup path via IsExpireRequested)
    if tree_expire > 0:
        factory.addStep(FileDownload(
            name = "dlexpiresh",
            doStepIf = IsExpireRequested,
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            doStepIf = IsExpireRequested,
            timeout = 2400))

    # cleanup.sh if needed
    factory.addStep(FileDownload(
        name = "dlcleanupsh",
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755,
        doStepIf = IsCleanupRequested))

    factory.addStep(ShellCommand(
        name = "cleanold",
        description = "Cleaning previous builds",
        command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsCleanupRequested,
        timeout = 2400))

    factory.addStep(ShellCommand(
        name = "cleanup",
        description = "Cleaning work area",
        command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsCleanupRequested,
        timeout = 2400))
788
789 # user-requested clean targets
790 for tuple in CleanTargetMap:
791 factory.addStep(ShellCommand(
792 name = tuple[1],
793 description = 'User-requested "make %s"' % tuple[1],
794 command = ["make", tuple[1], "V=s"],
795 env = MakeEnv(),
796 doStepIf = IsMakeCleanRequested(tuple[0])
797 ))
798
799 # Workaround bug when switching from a checked out tag back to a branch
800 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
801 factory.addStep(ShellCommand(
802 name = "gitcheckout",
803 description = "Ensure that Git HEAD is sane",
804 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
805 haltOnFailure = True))
806
    # check out the source
    # Git() runs:
    # if repo doesn't exist: 'git clone repourl'
    # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
    # 'git fetch -t repourl branch; git reset --hard revision'
    # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
    # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
    # Exactly one of gitclean/gitfresh runs, chosen by the do_cleanup property
    # (IsGitCleanRequested is the complement of IsGitFreshRequested).
    factory.addStep(Git(
        name = "gitclean",
        repourl = repo_url,
        branch = repo_branch,
        mode = 'full',
        method = 'clean',
        locks = NetLockDl,
        haltOnFailure = True,
        doStepIf = IsGitCleanRequested,
    ))

    factory.addStep(Git(
        name = "gitfresh",
        repourl = repo_url,
        branch = repo_branch,
        mode = 'full',
        method = 'fresh',
        locks = NetLockDl,
        haltOnFailure = True,
        doStepIf = IsGitFreshRequested,
    ))

    # update remote refs
    factory.addStep(ShellCommand(
        name = "fetchrefs",
        description = "Fetching Git remote refs",
        command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
        haltOnFailure = True
    ))
843
    # switch to tag (only for forced release builds with a valid 'tag' property)
    factory.addStep(ShellCommand(
        name = "switchtag",
        description = "Checking out Git tag",
        command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
        haltOnFailure = True,
        doStepIf = IsTaggingRequested
    ))

    # Verify that Git HEAD points to a tag or branch
    # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
    factory.addStep(ShellCommand(
        name = "gitverify",
        description = "Ensure that Git HEAD is pointing to a branch or tag",
        command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "rmtmp",
        description = "Remove tmp folder",
        command=["rm", "-rf", "tmp/"]))
865
    # feed
    # factory.addStep(ShellCommand(
    #	name = "feedsconf",
    #	description = "Copy the feeds.conf",
    #	command='''cp ~/feeds.conf ./feeds.conf''' ))

    # feed
    factory.addStep(ShellCommand(
        name = "rmfeedlinks",
        description = "Remove feed symlinks",
        command=["rm", "-rf", "package/feeds/"]))

    # ccache wrapper scripts; $CCACHE/$CCC/$CCXX are supplied at build time
    # by MakeEnv() via the step environment
    factory.addStep(StringDownload(
        name = "ccachecc",
        s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
        workerdest = "../ccache_cc.sh",
        mode = 0o755,
    ))

    factory.addStep(StringDownload(
        name = "ccachecxx",
        s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
        workerdest = "../ccache_cxx.sh",
        mode = 0o755,
    ))
891
    # Git SSH: install the deploy key and rewrite feed URLs to SSH form so
    # the feeds update can authenticate
    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600,
        ))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf",
            command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
            haltOnFailure = True
        ))

    # feed
    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        command=["./scripts/feeds", "update"],
        env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
        haltOnFailure = True,
        locks = NetLockDl,
    ))

    # Git SSH: drop the patched feeds.conf again once the update is done
    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            command=["rm", "feeds.conf"],
            haltOnFailure = True
        ))

    # feed
    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        command=["./scripts/feeds", "install", "-a"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True
    ))
935
    # seed config
    # NOTE(review): config_seed is initialised to "" and never set to None,
    # so this condition is always true and the seed download always runs.
    if config_seed is not None:
        factory.addStep(StringDownload(
            name = "dlconfigseed",
            s = config_seed + '\n',
            workerdest = ".config",
            mode = 0o644
        ))

    # configure: append the target/subtarget symbols and enable signing only
    # when a usign key is configured
    factory.addStep(ShellCommand(
        name = "newconfig",
        description = "Seeding .config",
        command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
    ))

    factory.addStep(ShellCommand(
        name = "delbin",
        description = "Removing output directory",
        command = ["rm", "-rf", "bin/"]
    ))

    factory.addStep(ShellCommand(
        name = "defconfig",
        description = "Populating .config",
        command = ["make", "defconfig"],
        env = MakeEnv()
    ))

    # check arch: defconfig must have kept our target symbol, otherwise the
    # seed/target combination is broken
    factory.addStep(ShellCommand(
        name = "checkarch",
        description = "Checking architecture",
        command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
        logEnviron = False,
        want_stdout = False,
        want_stderr = False,
        haltOnFailure = True
    ))

    # find libc suffix ('' for musl, '-<libc>' otherwise)
    factory.addStep(SetPropertyFromCommand(
        name = "libc",
        property = "libc",
        description = "Finding libc suffix",
        command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))

    # install build key: real public key derived from the usign secret,
    # plus placeholder private key/certificate files
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "key-build.pub",
            mode = 0o600,
        ))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "key-build",
            mode = 0o600,
        ))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "key-build.ucert",
            mode = 0o600,
        ))
1005
1006 # prepare dl
1007 factory.addStep(ShellCommand(
1008 name = "dldir",
1009 description = "Preparing dl/",
1010 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
1011 logEnviron = False,
1012 want_stdout = False
1013 ))
1014
1015 # prepare tar
1016 factory.addStep(ShellCommand(
1017 name = "dltar",
1018 description = "Building and installing GNU tar",
1019 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
1020 env = MakeEnv(tryccache=True),
1021 haltOnFailure = True
1022 ))
1023
1024 # populate dl
1025 factory.addStep(ShellCommand(
1026 name = "dlrun",
1027 description = "Populating dl/",
1028 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
1029 env = MakeEnv(),
1030 logEnviron = False,
1031 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
1032 ))
1033
1034 factory.addStep(ShellCommand(
1035 name = "cleanbase",
1036 description = "Cleaning base-files",
1037 command=["make", "package/base-files/clean", "V=s"]
1038 ))
1039
	# build: tools -> toolchain -> kernel/target, in dependency order
	factory.addStep(ShellCommand(
		name = "tools",
		description = "Building and installing tools",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
		env = MakeEnv(tryccache=True),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "toolchain",
		description = "Building and installing toolchain",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "kmods",
		description = "Building kmods",
		# BUILD_LOG=1 keeps per-package build logs; IGNORE_ERRORS is passed
		# through to the OpenWrt buildsystem (presumably tolerating failures
		# of 'n'/'m' packages — confirm against include/toplevel.mk).
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# find kernel version: joins LINUX_VERSION, LINUX_RELEASE and
	# LINUX_VERMAGIC into a single "version-release-vermagic" property that
	# the kmod-archive paths below interpolate.
	factory.addStep(SetPropertyFromCommand(
		name = "kernelversion",
		property = "kernelversion",
		description = "Finding the effective Kernel version",
		command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
		env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
	))
1074
	# Userspace package phase: clean, compile, install, index.
	factory.addStep(ShellCommand(
		name = "pkgclean",
		description = "Cleaning up package build",
		command=["make", "package/cleanup", "V=s"]
	))

	factory.addStep(ShellCommand(
		name = "pkgbuild",
		description = "Building packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "pkginstall",
		description = "Installing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "pkgindex",
		description = "Indexing packages",
		# CONFIG_SIGNED_PACKAGES= disables in-build signing; the index files
		# are signed later on the master (see the "sign" steps).
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
		env = MakeEnv(),
		haltOnFailure = True
	))
1106
	if enable_kmod_archive and embed_kmod_repository:
		# embed kmod repository. Must happen before 'images' so the feed
		# configuration ends up inside the generated images.

		# find rootfs staging directory (where the default distfeeds.conf lives)
		factory.addStep(SetPropertyFromCommand(
			name = "stageroot",
			property = "stageroot",
			description = "Finding the rootfs staging directory",
			command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
			env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
			want_stderr = False
		))

		factory.addStep(ShellCommand(
			name = "filesdir",
			description = "Creating file overlay directory",
			command=["mkdir", "-p", "files/etc/opkg"],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodconfig",
			description = "Embedding kmod repository configuration",
			# For every "src/gz <name>_core <url>/packages" feed line, append a
			# matching "<name>_kmods <url>/kmods/<kernelversion>" line, writing
			# the result into the file overlay's distfeeds.conf.
			command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
			                    "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
			haltOnFailure = True
		))
1134
	#factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "images",
		description = "Building and installing images",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "buildinfo",
		description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
		# "|| true" makes this best-effort — presumably for branches where
		# the buildinfo make target does not exist; verify before removing.
		command = "make -j1 buildinfo V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "json_overview_image_info",
		description = "Generate profiles.json in target folder",
		command = "make -j1 json_overview_image_info V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "checksums",
		description = "Calculating checksums",
		command=["make", "-j1", "checksum", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))
1167
	if enable_kmod_archive:
		# Collect the kernel-module packages for this exact kernel version
		# into bin/targets/<t>/<st>/kmods/<kernelversion>/ and index them.
		factory.addStep(ShellCommand(
			name = "kmoddir",
			description = "Creating kmod directory",
			command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodprepare",
			description = "Preparing kmod archive",
			# Copy only kmod-*.ipk from the regular package output.
			command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
			         Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
			         Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "kmodindex",
			description = "Indexing kmod archive",
			command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
			         Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			env = MakeEnv(),
			haltOnFailure = True
		))
1193
	# sign: round-trip through the master. The worker packs the sha256sums
	# and Packages index files into a tarball, uploads it to the master,
	# the master signs everything with signall.sh (which holds the real
	# keys), and the worker downloads and unpacks the signed files again.
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			workersrc = "sign.tar.gz",
			masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			name = "dlsigntargz",
			mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			workerdest = "sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
1237
	# upload: create the remote directory skeleton first, then sync it over.
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		# Point the versioned "packages" path at the shared per-base-version
		# package tree; only for non-master builds (doStepIf).
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
		locks = NetLockUl,
	))
1271
	# download remote sha256sums to 'target-sha256sums'
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		# Best-effort: a missing remote sha256sums (e.g. first build of a
		# target) must not fail or even warn on the build.
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload: sha2rsync.pl compares the remote and
	# local sha256sums and writes the rsync file list to 'rsynclist'.
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))
1305
	# upload new files and update existing ones (only those in rsynclist);
	# kmods/ is excluded here and synced separately below.
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally ("--existing --ignore-existing
	# --delete" transfers nothing, it only prunes remote-only files).
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
			         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1343
	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			# Only top-level, non-empty archives fetched by this build
			# (-newer .config), skipping dotfiles, *.hash and partial *.dl.
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			# --partial-dir is keyed by target/subtarget/worker so concurrent
			# builds don't clobber each other's partial transfers.
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
			        [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1362
	# NOTE(review): the next two steps are intentionally disabled with
	# "if False:" and kept for reference; consider deleting them or gating
	# them behind a config option if they are not coming back.
	if False:
		factory.addStep(ShellCommand(
			name = "packageupload",
			description = "Uploading package files",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			flunkOnFailure = False,
			warnOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))

	# logs (disabled, see NOTE above the previous block)
	if False:
		factory.addStep(ShellCommand(
			name = "upload",
			description = "Uploading logs",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			flunkOnFailure = False,
			warnOnFailure = True,
			alwaysRun = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1390
	# Diagnostics: always run, never affect the build result.
	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},  # untranslated, stable output
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "du",
		description = "Reporting estimated file space usage",
		command=["du", "-sh", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		# ccache may live in the build's staging dir rather than on PATH.
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))
1424
	# Register the per-target builder plus a Triggerable scheduler for it,
	# and extend the shared force_factory with a step that fires this
	# trigger when the target is selected in a forced build.
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		doStepIf = IsTargetSelected(target)
	))
1435
1436
1437 ####### STATUS TARGETS
1438
1439 # 'status' is a list of Status Targets. The results of each build will be
1440 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1441 # including web pages, email senders, and IRC bots.
1442
# Web UI: only enabled when a listen address is configured.
if ini.has_option("phase1", "status_bind"):
	c['www'] = {
		'port': ini.get("phase1", "status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	# Optional basic auth: a single admin account from the config file,
	# authorized for all control endpoints.
	has_credentials = ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password")
	if has_credentials:
		admin_user = ini.get("phase1", "status_user")
		c['www']['auth'] = util.UserPasswordAuth([
			(admin_user, ini.get("phase1", "status_password"))
		])
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[admin_user]) ]
		)
1461
c['services'] = []
# Optional IRC reporter: announces exceptions, breakages and recoveries.
if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
	# Port and password fall back to defaults when not configured.
	irc_port = ini.getint("irc", "port") if ini.has_option("irc", "port") else 6667
	irc_pass = ini.get("irc", "password") if ini.has_option("irc", "password") else None

	c['services'].append(reporters.IRC(
		ini.get("irc", "host"),
		ini.get("irc", "nickname"),
		port = irc_port,
		password = irc_pass,
		channels = [ ini.get("irc", "channel") ],
		notify_events = [ 'exception', 'problem', 'recovery' ]
	))
1484
# Map openwrt.org git URLs from change sources to cgit commit web links.
c['revlink'] = util.RevlinkMatch([
	r'https://git.openwrt.org/openwrt/(.*).git'
	],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

# Opt out of sending anonymous usage data to the buildbot project.
c['buildbotNetUsageData'] = None