phase1: remove unused 'git_ssh'
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Write our PID file if twistd has not already done so, so deployment
# tooling can detect a running master.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Deployment-specific settings; BUILDMASTER_CONFIG may point at an
# alternative ini file (defaults to ./config.ini).
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
57
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# TCP port workers connect to; a "port" option in [phase1] overrides it.
worker_port = 9989

if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")
85
# Register every "worker <name>" ini section that belongs to phase 1
# (either no "phase" option, or phase == 1). Per-worker behaviour
# (cleanup policy, net locks, shared workdir) is carried as worker
# properties so build steps can consult it via doStepIf/renderers.
c['workers'] = []
NetLocks = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
            sl_props['max_builds'] = max_builds
            # A worker that runs only one build at a time can safely share
            # a single work directory between builders.
            if max_builds == 1:
                sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUGFIX: this previously re-read the "dl_lock" option
                # (copy/paste error), so a distinct upload lock configured
                # via "ul_lock" was silently ignored.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds: merge queued requests for the same builder into one build
c['collapseRequests'] = True

# Reduce amount of backlog data: prune step logs older than three days,
# running the janitor daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
134
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also look at the most recently started build; if it finished later
    # than the newest request's complete_at, prefer that timestamp.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
172
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build, so the builder that has waited longest is served first.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # A builder with active (or pre-claim) builds is currently busy.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders get the oldest possible timestamp so they
        # sort to the front of the queue.
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # Busy builders get the newest possible timestamp so they sort last.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Local renamed from "results" to stop shadowing the
    # buildbot.process.results module imported at the top of this file.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for r in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in infos]
209
c['prioritizeBuilders'] = prioritizeBuilders

####### CHANGESOURCES

# Master-side checkout/work area and the directory of helper scripts that
# are downloaded to the workers.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# Days after which a worker's build tree is expired (0 = never expire).
tree_expire = 0

cc_command = "gcc"
cxx_command = "g++"

# Seed content written to .config before "make defconfig" on the workers.
config_seed = ""

if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")

# Source repository and branch every builder checks out.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# rsync destination for built binaries. --contimeout only applies when
# talking to an rsync daemon (host::module or rsync:// URL).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

# Optional rsync destination for source tarballs.
rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

    if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
        rsync_src_defopts += ["--contimeout=20"]

# usign package-signing key; the default comment is derived from the branch
# name (e.g. "openwrt-19.07" -> "Openwrt 19.07 key").
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

# Whether to archive kernel modules, and whether to embed the kmod feed into
# the images' opkg feed configuration.
enable_kmod_archive = False
embed_kmod_repository = False

if ini.has_option("phase1", "kmod_archive"):
    enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")

if ini.has_option("phase1", "kmod_repository"):
    embed_kmod_repository = ini.getboolean("phase1", "kmod_repository")
271
# find targets
targets = [ ]

# Keep a shallow clone of the source tree on the master so we can ask it
# which targets exist.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# dump-target-info.pl prints one target per line; only the first
# whitespace-separated field (the target name) is kept.
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])

# NOTE(review): findtargets is never wait()ed on, so its exit status goes
# unchecked — consider findtargets.wait() here.

# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
300
301 ####### SCHEDULERS
302
303 # Configure the Schedulers, which decide how to react to incoming changes. In this
304 # case, just kick off a 'basebuild' build
305
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter listing the Git tags that may be built.

    The choice list is recomputed each time it is read: for a versioned
    branch ("<name>-X.Y") all matching "vX.Y.Z[-rcN]" tags are fetched from
    the remote, sorted newest first, with an empty entry (= build branch
    HEAD) prepended. On "master" the list is empty.
    """
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # Last computed tag list; parse_from_arg() validates against it.
        self._choice_list = []

    @property
    def choices(self):
        taglist = []
        # Extract the base version ("X.Y") from branch names like "foo-19.07".
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()

                if not line:
                    break

                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

                # Keep only tags belonging to this branch's base version.
                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # Newest first; appending "-z" to final releases makes "X.Y.Z"
            # sort ahead of its release candidates "X.Y.Z-rcN".
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        # Reject anything not present in the most recently offered list.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
347
c['schedulers'] = []
# Automatic builds: any change on the tracked branch triggers all target
# builders once the tree has been stable for 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual builds: anchored on the dummy "00_force_build" builder; lets the
# operator pick a single target (or all) and optionally a release tag.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
401
####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

# Maps the user-selectable "clean" property value to the make target it runs.
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]
415
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches *pattern* (regex match, anchored at the start)."""
    def _check(step):
        requested = step.getProperty("clean")
        return bool(requested and re.match(pattern, requested))

    return _check
425
def IsSharedWorkdir(step):
    """doStepIf helper: true when this worker uses the shared-workdir layout."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
428
def IsCleanupRequested(step):
    """doStepIf helper: run the cleanup.sh steps — only for workers without a
    shared workdir that are configured with cleanup enabled."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))
437
def IsExpireRequested(step):
    """doStepIf helper: expire the build tree — non-shared workdirs that are
    not scheduled for a full cleanup instead."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
443
def IsGitFreshRequested(step):
    """doStepIf helper: worker wants a pristine ("fresh") git checkout as
    part of its cleanup policy."""
    requested = step.getProperty("do_cleanup")
    return bool(requested)
450
def IsGitCleanRequested(step):
    """doStepIf helper: default "clean" git checkout (inverse of fresh)."""
    fresh = IsGitFreshRequested(step)
    return not fresh
453
def IsTaggingRequested(step):
    """doStepIf helper: a well-formed release tag ("X.Y.Z" or "X.Y.Z-rcN")
    was supplied via the force scheduler."""
    tag = step.getProperty("tag")
    return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))
460
def IsNoTaggingRequested(step):
    """doStepIf helper: no release-tag build was requested."""
    tagging = IsTaggingRequested(step)
    return not tagging
463
def IsNoMasterBuild(step):
    """doStepIf helper: true when building a release branch, false on master."""
    branch = repo_branch
    return branch != "master"
466
def GetBaseVersion():
    """Return the numeric base version (e.g. "19.07") parsed from the branch
    name "<name>-X.Y", or "master" when the branch has no version suffix."""
    versioned = re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch)
    return repo_branch.split('-')[1] if versioned else "master"
472
@properties.renderer
def GetVersionPrefix(props):
    """Renderer: upload directory prefix — "X.Y.Z/" for tagged release
    builds, "X.Y-SNAPSHOT/" on release branches, "" on master."""
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    basever = GetBaseVersion()
    if basever == "master":
        return ""
    return "%s-SNAPSHOT/" % basever
482
@properties.renderer
def GetNumJobs(props):
    """Renderer: parallel make jobs = nproc / max_builds, as a string;
    falls back to "1" when either property is missing."""
    if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
        return "1"
    return str(int(int(props["nproc"]) / props["max_builds"]))
489
@properties.renderer
def GetCC(props):
    """Renderer: host C compiler found by the "gcc" step, defaulting to gcc."""
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"
496
@properties.renderer
def GetCXX(props):
    """Renderer: host C++ compiler found by the "g++" step, defaulting to g++."""
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"
503
@properties.renderer
def GetCwd(props):
    """Renderer: best available path for the build's directory on the worker
    (builddir, then workdir, then "/")."""
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
512
@properties.renderer
def GetCCache(props):
    """Renderer: the ccache path discovered by the probe step, or "" when
    ccache is unavailable."""
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
519
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer any pending request carrying a "tag" property
    (forced release builds); otherwise take the oldest request."""
    for req in requests:
        if req.properties and req.properties.hasProperty("tag"):
            return req

    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
528
def MakeEnv(overrides=None, tryccache=False):
    """Build the common environment for make steps.

    CCC/CCXX always hold the real host compilers; with tryccache=True,
    CC/CXX point at the ccache wrapper scripts on the worker instead.
    *overrides* entries are merged in last and win.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env.update({
            'CC': Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd),
            'CXX': Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd),
            'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
        })
    else:
        env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
    if overrides is not None:
        env.update(overrides)
    return env
545
@properties.renderer
def NetLockDl(props):
    """Renderer: exclusive download net-lock for this worker, if configured."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []
555
@properties.renderer
def NetLockUl(props):
    """Renderer: exclusive upload net-lock for this worker, if configured."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
565
@util.renderer
def TagPropertyValue(props):
    """Renderer: extract the requested tag from the force scheduler's nested
    "options" parameter, or None when no tag was requested."""
    options = props.getProperty("options") if props.hasProperty("options") else None
    if type(options) is dict:
        return options.get("tag")
    return None
573
def IsTargetSelected(target):
    """Return a doStepIf predicate that is true unless the force-build
    "options" select a different specific target."""
    def _check(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                chosen = options.get("target", "all")
                if chosen not in ("all", target):
                    return False
        except KeyError:
            pass

        return True

    return _check
588
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key file content from a base64 secret key.

    The public portion consists of bytes 0-1 (key type), the 8-byte key id
    at offset 32, and the public key material from offset 72 onward.
    Returns the two-line key file as a str, or None when *seckey* is not
    valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except (ValueError, TypeError):
        # Invalid/missing key material: signing simply stays disabled.
        return None

    # BUGFIX: b64encode() returns bytes on Python 3; formatting it directly
    # embedded a literal "b'...'" into the generated key file. Decode it.
    pubkey = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode("ascii")
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubkey)
597
598
c['builders'] = []

# Serializes dl/ population across concurrent builds on the same worker.
dlLock = locks.WorkerLock("worker_dl")

# Shell function prepended to IfBuiltinShellCommand payloads. It exits 0
# (payload runs) when $CHANGED_FILES touches anything outside package/, or a
# changed package that .config enables as built-in (=y) according to
# tmp/.packagedeps; exits 1 (payload skipped) otherwise. Whitespace is
# collapsed to keep the resulting command line on one line.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
checkBuiltin() {
	local symbol op path file;
	for file in $CHANGED_FILES; do
		case "$file" in
			package/*/*) : ;;
			*) return 0 ;;
		esac;
	done;
	while read symbol op path; do
		case "$symbol" in package-*)
			symbol="${symbol##*(}";
			symbol="${symbol%)}";
			for file in $CHANGED_FILES; do
				case "$file" in "package/$path/"*)
					grep -qsx "$symbol=y" .config && return 0
				;; esac;
			done;
		esac;
	done < tmp/.packagedeps;
	return 1;
}
""").strip()
626
627
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand that only runs its payload when checkBuiltin succeeds,
    i.e. when one of the changed files affects a built-in (=y) package."""

    def _quote(self, text):
        # Shell-quote one argument; wrap only when unsafe characters appear.
        # (Parameter renamed from "str", which shadowed the builtin.)
        if re.search("[^a-zA-Z0-9/_.-]", text):
            return "'%s'" %(re.sub("'", "'\"'\"'", text))
        return text

    def setCommand(self, command):
        # BUGFIX: the original tested isinstance(command, (str, unicode));
        # "unicode" does not exist on Python 3 and raised NameError whenever
        # a list command was passed.
        if not isinstance(command, str):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        # Expose the files touched by the triggering changes so the
        # checkBuiltin shell helper can inspect $CHANGED_FILES.
        workerEnv = self.workerEnvironment
        if workerEnv is None:
            workerEnv = { }
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullWorkerEnv = workerEnv.copy()
        fullWorkerEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullWorkerEnv
655
# Every phase-1 worker is allowed to run any builder.
workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

# Dummy builder that anchors the force scheduler; it has no build steps.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
667
668 for target in targets:
669 ts = target.split('/')
670
671 factory = BuildFactory()
672
673 # setup shared work directory if required
674 factory.addStep(ShellCommand(
675 name = "sharedwd",
676 description = "Setting up shared work directory",
677 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
678 workdir = ".",
679 haltOnFailure = True,
680 doStepIf = IsSharedWorkdir))
681
682 # find number of cores
683 factory.addStep(SetPropertyFromCommand(
684 name = "nproc",
685 property = "nproc",
686 description = "Finding number of CPUs",
687 command = ["nproc"]))
688
689 # find gcc and g++ compilers
690 factory.addStep(FileDownload(
691 name = "dlfindbinpl",
692 mastersrc = scripts_dir + '/findbin.pl',
693 workerdest = "../findbin.pl",
694 mode = 0o755))
695
696 factory.addStep(SetPropertyFromCommand(
697 name = "gcc",
698 property = "cc_command",
699 description = "Finding gcc command",
700 command = [
701 "../findbin.pl", "gcc", "", "",
702 ],
703 haltOnFailure = True))
704
705 factory.addStep(SetPropertyFromCommand(
706 name = "g++",
707 property = "cxx_command",
708 description = "Finding g++ command",
709 command = [
710 "../findbin.pl", "g++", "", "",
711 ],
712 haltOnFailure = True))
713
714 # see if ccache is available
715 factory.addStep(SetPropertyFromCommand(
716 property = "ccache_command",
717 command = ["which", "ccache"],
718 description = "Testing for ccache command",
719 haltOnFailure = False,
720 flunkOnFailure = False,
721 warnOnFailure = False,
722 ))
723
724 # expire tree if needed
725 if tree_expire > 0:
726 factory.addStep(FileDownload(
727 name = "dlexpiresh",
728 doStepIf = IsExpireRequested,
729 mastersrc = scripts_dir + '/expire.sh',
730 workerdest = "../expire.sh",
731 mode = 0o755))
732
733 factory.addStep(ShellCommand(
734 name = "expire",
735 description = "Checking for build tree expiry",
736 command = ["./expire.sh", str(tree_expire)],
737 workdir = ".",
738 haltOnFailure = True,
739 doStepIf = IsExpireRequested,
740 timeout = 2400))
741
742 # cleanup.sh if needed
743 factory.addStep(FileDownload(
744 name = "dlcleanupsh",
745 mastersrc = scripts_dir + '/cleanup.sh',
746 workerdest = "../cleanup.sh",
747 mode = 0o755,
748 doStepIf = IsCleanupRequested))
749
750 factory.addStep(ShellCommand(
751 name = "cleanold",
752 description = "Cleaning previous builds",
753 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
754 workdir = ".",
755 haltOnFailure = True,
756 doStepIf = IsCleanupRequested,
757 timeout = 2400))
758
759 factory.addStep(ShellCommand(
760 name = "cleanup",
761 description = "Cleaning work area",
762 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
763 workdir = ".",
764 haltOnFailure = True,
765 doStepIf = IsCleanupRequested,
766 timeout = 2400))
767
768 # user-requested clean targets
769 for tuple in CleanTargetMap:
770 factory.addStep(ShellCommand(
771 name = tuple[1],
772 description = 'User-requested "make %s"' % tuple[1],
773 command = ["make", tuple[1], "V=s"],
774 env = MakeEnv(),
775 doStepIf = IsMakeCleanRequested(tuple[0])
776 ))
777
778 # Workaround bug when switching from a checked out tag back to a branch
779 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
780 factory.addStep(ShellCommand(
781 name = "gitcheckout",
782 description = "Ensure that Git HEAD is sane",
783 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
784 haltOnFailure = True))
785
786 # check out the source
787 # Git() runs:
788 # if repo doesn't exist: 'git clone repourl'
789 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
790 # 'git fetch -t repourl branch; git reset --hard revision'
791 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
792 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
793 factory.addStep(Git(
794 name = "gitclean",
795 repourl = repo_url,
796 branch = repo_branch,
797 mode = 'full',
798 method = 'clean',
799 locks = NetLockDl,
800 haltOnFailure = True,
801 doStepIf = IsGitCleanRequested,
802 ))
803
804 factory.addStep(Git(
805 name = "gitfresh",
806 repourl = repo_url,
807 branch = repo_branch,
808 mode = 'full',
809 method = 'fresh',
810 locks = NetLockDl,
811 haltOnFailure = True,
812 doStepIf = IsGitFreshRequested,
813 ))
814
815 # update remote refs
816 factory.addStep(ShellCommand(
817 name = "fetchrefs",
818 description = "Fetching Git remote refs",
819 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
820 haltOnFailure = True
821 ))
822
823 # switch to tag
824 factory.addStep(ShellCommand(
825 name = "switchtag",
826 description = "Checking out Git tag",
827 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
828 haltOnFailure = True,
829 doStepIf = IsTaggingRequested
830 ))
831
832 # Verify that Git HEAD points to a tag or branch
833 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
834 factory.addStep(ShellCommand(
835 name = "gitverify",
836 description = "Ensure that Git HEAD is pointing to a branch or tag",
837 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
838 haltOnFailure = True))
839
840 factory.addStep(ShellCommand(
841 name = "rmtmp",
842 description = "Remove tmp folder",
843 command=["rm", "-rf", "tmp/"]))
844
845 # feed
846 # factory.addStep(ShellCommand(
847 # name = "feedsconf",
848 # description = "Copy the feeds.conf",
849 # command='''cp ~/feeds.conf ./feeds.conf''' ))
850
851 # feed
852 factory.addStep(ShellCommand(
853 name = "rmfeedlinks",
854 description = "Remove feed symlinks",
855 command=["rm", "-rf", "package/feeds/"]))
856
857 factory.addStep(StringDownload(
858 name = "ccachecc",
859 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
860 workerdest = "../ccache_cc.sh",
861 mode = 0o755,
862 ))
863
864 factory.addStep(StringDownload(
865 name = "ccachecxx",
866 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
867 workerdest = "../ccache_cxx.sh",
868 mode = 0o755,
869 ))
870
871 # feed
872 factory.addStep(ShellCommand(
873 name = "updatefeeds",
874 description = "Updating feeds",
875 command=["./scripts/feeds", "update"],
876 env = MakeEnv(tryccache=True),
877 haltOnFailure = True,
878 locks = NetLockDl,
879 ))
880
881 # feed
882 factory.addStep(ShellCommand(
883 name = "installfeeds",
884 description = "Installing feeds",
885 command=["./scripts/feeds", "install", "-a"],
886 env = MakeEnv(tryccache=True),
887 haltOnFailure = True
888 ))
889
890 # seed config
891 if config_seed is not None:
892 factory.addStep(StringDownload(
893 name = "dlconfigseed",
894 s = config_seed + '\n',
895 workerdest = ".config",
896 mode = 0o644
897 ))
898
899 # configure
900 factory.addStep(ShellCommand(
901 name = "newconfig",
902 description = "Seeding .config",
903 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
904 ))
905
906 factory.addStep(ShellCommand(
907 name = "delbin",
908 description = "Removing output directory",
909 command = ["rm", "-rf", "bin/"]
910 ))
911
912 factory.addStep(ShellCommand(
913 name = "defconfig",
914 description = "Populating .config",
915 command = ["make", "defconfig"],
916 env = MakeEnv()
917 ))
918
919 # check arch
920 factory.addStep(ShellCommand(
921 name = "checkarch",
922 description = "Checking architecture",
923 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
924 logEnviron = False,
925 want_stdout = False,
926 want_stderr = False,
927 haltOnFailure = True
928 ))
929
930 # find libc suffix
931 factory.addStep(SetPropertyFromCommand(
932 name = "libc",
933 property = "libc",
934 description = "Finding libc suffix",
935 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
936
937 # install build key
938 if usign_key is not None:
939 factory.addStep(StringDownload(
940 name = "dlkeybuildpub",
941 s = UsignSec2Pub(usign_key, usign_comment),
942 workerdest = "key-build.pub",
943 mode = 0o600,
944 ))
945
946 factory.addStep(StringDownload(
947 name = "dlkeybuild",
948 s = "# fake private key",
949 workerdest = "key-build",
950 mode = 0o600,
951 ))
952
953 factory.addStep(StringDownload(
954 name = "dlkeybuilducert",
955 s = "# fake certificate",
956 workerdest = "key-build.ucert",
957 mode = 0o600,
958 ))
959
960 # prepare dl
961 factory.addStep(ShellCommand(
962 name = "dldir",
963 description = "Preparing dl/",
964 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
965 logEnviron = False,
966 want_stdout = False
967 ))
968
969 # prepare tar
970 factory.addStep(ShellCommand(
971 name = "dltar",
972 description = "Building and installing GNU tar",
973 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
974 env = MakeEnv(tryccache=True),
975 haltOnFailure = True
976 ))
977
978 # populate dl
979 factory.addStep(ShellCommand(
980 name = "dlrun",
981 description = "Populating dl/",
982 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
983 env = MakeEnv(),
984 logEnviron = False,
985 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
986 ))
987
988 factory.addStep(ShellCommand(
989 name = "cleanbase",
990 description = "Cleaning base-files",
991 command=["make", "package/base-files/clean", "V=s"]
992 ))
993
# build
# Core compile pipeline: tools -> toolchain -> kernel/kmods -> packages -> index.
# Every stage halts the whole build on a hard error; the -j level is supplied
# at runtime by the GetNumJobs renderer.
factory.addStep(ShellCommand(
    name = "tools",
    description = "Building and installing tools",
    # tryccache=True lets MakeEnv route the host compilers through ccache
    command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
    env = MakeEnv(tryccache=True),
    haltOnFailure = True
))

factory.addStep(ShellCommand(
    name = "toolchain",
    description = "Building and installing toolchain",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
    env = MakeEnv(),
    haltOnFailure = True
))

factory.addStep(ShellCommand(
    name = "kmods",
    description = "Building kmods",
    # IGNORE_ERRORS="n m": OpenWrt make flag that tolerates failures in certain
    # package classes instead of aborting — TODO confirm exact class semantics
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
    env = MakeEnv(),
    #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
    haltOnFailure = True
))

# find kernel version
factory.addStep(SetPropertyFromCommand(
    name = "kernelversion",
    property = "kernelversion",
    description = "Finding the effective Kernel version",
    # Joins LINUX_VERSION, LINUX_RELEASE and LINUX_VERMAGIC into one
    # "version-release-vermagic" string; the resulting 'kernelversion'
    # property names the kmod archive directories used further below.
    command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
    env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
))

factory.addStep(ShellCommand(
    name = "pkgclean",
    description = "Cleaning up package build",
    command=["make", "package/cleanup", "V=s"]
))

factory.addStep(ShellCommand(
    name = "pkgbuild",
    description = "Building packages",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
    env = MakeEnv(),
    #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
    haltOnFailure = True
))

# factory.addStep(IfBuiltinShellCommand(
factory.addStep(ShellCommand(
    name = "pkginstall",
    description = "Installing packages",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
    env = MakeEnv(),
    haltOnFailure = True
))

factory.addStep(ShellCommand(
    name = "pkgindex",
    description = "Indexing packages",
    # CONFIG_SIGNED_PACKAGES= disables in-tree signing here; index files are
    # signed later on the master (see the "sign" section below)
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
    env = MakeEnv(),
    haltOnFailure = True
))
1060
if enable_kmod_archive and embed_kmod_repository:
    # embed kmod repository. Must happen before 'images'

    # find rootfs staging directory
    factory.addStep(SetPropertyFromCommand(
        name = "stageroot",
        property = "stageroot",
        description = "Finding the rootfs staging directory",
        command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
        env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) },
        # only stdout should land in the property value
        want_stderr = False
    ))

    factory.addStep(ShellCommand(
        name = "filesdir",
        description = "Creating file overlay directory",
        # files/ is OpenWrt's custom-files overlay (its contents are copied
        # into built images — standard buildroot behavior)
        command=["mkdir", "-p", "files/etc/opkg"],
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "kmodconfig",
        description = "Embedding kmod repository configuration",
        # Duplicates each "src/gz <name>_core <url>/packages" feed line of the
        # staged distfeeds.conf as an additional "<name>_kmods
        # <url>/kmods/<kernelversion>" feed, writing the result into the
        # overlay so images ship with the kmod repository pre-configured.
        command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
            "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
        haltOnFailure = True
    ))
1088
#factory.addStep(IfBuiltinShellCommand(
factory.addStep(ShellCommand(
    name = "images",
    description = "Building and installing images",
    command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
    env = MakeEnv(),
    haltOnFailure = True
))

factory.addStep(ShellCommand(
    name = "buildinfo",
    description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
    # "|| true" makes this best-effort: the shell always exits 0, so the
    # haltOnFailure flag can never actually trigger on this step.
    command = "make -j1 buildinfo V=s || true",
    env = MakeEnv(),
    haltOnFailure = True
))

factory.addStep(ShellCommand(
    name = "json_overview_image_info",
    description = "Generate profiles.json in target folder",
    # best-effort as well — "|| true" swallows any make failure
    command = "make -j1 json_overview_image_info V=s || true",
    env = MakeEnv(),
    haltOnFailure = True
))

factory.addStep(ShellCommand(
    name = "checksums",
    description = "Calculating checksums",
    # Runs after all artifacts exist; produces the sha256sums file consumed by
    # the signing and upload steps further below.
    command=["make", "-j1", "checksum", "V=s"],
    env = MakeEnv(),
    haltOnFailure = True
))
1121
if enable_kmod_archive:
    # Populate and index the per-kernel-version kmod archive under
    # bin/targets/<target>/<subtarget><libc>/kmods/<kernelversion>/.
    factory.addStep(ShellCommand(
        name = "kmoddir",
        description = "Creating kmod directory",
        command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "kmodprepare",
        description = "Preparing kmod archive",
        # Copy only kmod-*.ipk from the regular package output into the
        # archive directory created above ("--exclude=*" drops everything else).
        command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
            Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
            Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "kmodindex",
        description = "Indexing kmod archive",
        # Reuses package/index with PACKAGE_SUBDIRS pointed at the archive;
        # unsigned here (CONFIG_SIGNED_PACKAGES=), signing happens on the master.
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
            Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        env = MakeEnv(),
        haltOnFailure = True
    ))
1147
# sign
# Signing round-trip: the worker tars up all sha256sums/Packages index files,
# uploads the tarball to the master, signall.sh signs it there (the key
# material lives in the master's config.ini, never on workers), then the
# signed tarball is downloaded back and unpacked in place.
if ini.has_option("gpg", "key") or usign_key is not None:
    factory.addStep(MasterShellCommand(
        name = "signprepare",
        description = "Preparing temporary signing directory",
        command = ["mkdir", "-p", "%s/signing" %(work_dir)],
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "signpack",
        description = "Packing files to sign",
        # -maxdepth 2 also catches the per-kernel kmods/<version>/ index files
        command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
        haltOnFailure = True
    ))

    factory.addStep(FileUpload(
        workersrc = "sign.tar.gz",
        # per-target filename so concurrent builders don't clobber each other
        masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        haltOnFailure = True
    ))

    factory.addStep(MasterShellCommand(
        name = "signfiles",
        description = "Signing files",
        command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
        # signall.sh reads its key configuration from the master's config.ini
        env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
        haltOnFailure = True
    ))

    factory.addStep(FileDownload(
        name = "dlsigntargz",
        mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        workerdest = "sign.tar.gz",
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "signunpack",
        description = "Unpacking signed files",
        command = ["tar", "-xzf", "sign.tar.gz"],
        haltOnFailure = True
    ))
1191
# upload
# Create the remote directory skeleton locally under tmp/upload/ and push it
# first, so the file uploads below always have their target directories.
factory.addStep(ShellCommand(
    name = "dirprepare",
    description = "Preparing upload directory structure",
    command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    haltOnFailure = True
))

factory.addStep(ShellCommand(
    name = "linkprepare",
    description = "Preparing repository symlink",
    # Non-master builds get a "packages" symlink to the shared
    # packages-<basever> tree; skipped on master builds (doStepIf).
    command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
    doStepIf = IsNoMasterBuild,
    haltOnFailure = True
))

if enable_kmod_archive:
    factory.addStep(ShellCommand(
        name = "kmoddirprepare",
        description = "Preparing kmod archive upload directory",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True
    ))

factory.addStep(ShellCommand(
    name = "dirupload",
    description = "Uploading directory structure",
    command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
    # logEnviron=False keeps RSYNC_PASSWORD out of the step log
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
    # serialize network uploads across builders via the shared lock
    locks = NetLockUl,
))
1225
# download remote sha256sums to 'target-sha256sums'
factory.addStep(ShellCommand(
    name = "target-sha256sums",
    description = "Fetching remote sha256sums for target",
    command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    logEnviron = False,
    # Best-effort: the remote file may not exist yet (e.g. first build of a
    # target), so a failure here must neither fail nor warn on the build.
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
))

# build list of files to upload
factory.addStep(FileDownload(
    name = "dlsha2rsyncpl",
    mastersrc = scripts_dir + '/sha2rsync.pl',
    # placed one level above the build dir so it survives build-dir cleanups
    # — TODO confirm that is the intent of the "../" destination
    workerdest = "../sha2rsync.pl",
    mode = 0o755,
))

factory.addStep(ShellCommand(
    name = "buildlist",
    description = "Building list of files to upload",
    # Produces "rsynclist" from the remote and local sha256sums; used below
    # as rsync --files-from so only changed files are transferred.
    command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
    haltOnFailure = True,
))

factory.addStep(FileDownload(
    name = "dlrsync.sh",
    mastersrc = scripts_dir + '/rsync.sh',
    workerdest = "../rsync.sh",
    mode = 0o755
))
1259
# upload new files and update existing ones
factory.addStep(ShellCommand(
    name = "targetupload",
    description = "Uploading target files",
    # Transfers only the files listed in rsynclist; /kmods/ is excluded here
    # because the kmod archive is uploaded separately below. --delay-updates
    # plus a per-target --partial-dir keeps the remote tree consistent while
    # the transfer is in flight.
    command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
        Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
))

# delete files which don't exist locally
factory.addStep(ShellCommand(
    name = "targetprune",
    description = "Pruning target files",
    # "--existing --ignore-existing --delete": rsync idiom that transfers
    # nothing and only deletes remote files with no local counterpart.
    command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
        Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
    locks = NetLockUl,
))

if enable_kmod_archive:
    factory.addStep(ShellCommand(
        name = "kmodupload",
        description = "Uploading kmod archive",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
        locks = NetLockUl,
    ))
1297
if rsync_src_url is not None:
    # Mirror freshly-downloaded source tarballs from dl/ to the sources server.
    factory.addStep(ShellCommand(
        name = "sourcelist",
        description = "Finding source archives to upload",
        # Only regular, non-empty dl/ files newer than .config; skips dotfiles
        # and the *.hash / *.dl bookkeeping files.
        command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
        haltOnFailure = True
    ))

    factory.addStep(ShellCommand(
        name = "sourceupload",
        description = "Uploading source archives",
        # --size-only: presumably sufficient because upstream source tarballs
        # do not change once published; the per-worker --partial-dir avoids
        # clashes between concurrent uploads of the same archive.
        command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
            [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
        env={'RSYNC_PASSWORD': rsync_src_key},
        haltOnFailure = True,
        logEnviron = False,
        locks = NetLockUl,
    ))
1316
# NOTE(review): two long-disabled steps used to live here behind `if False:`
# guards — a "packageupload" rsync of bin/packages/ and an "upload" step that
# pushed logs/ to <rsync_bin_url>/logs/<target>/<subtarget>/. They could never
# execute (the guards were unconditionally false), so the dead code has been
# removed; recover it from version control history if it is ever needed again.
1344
# Post-build diagnostics. Every step here is purely informational: it never
# fails, never warns, and runs even when the build itself failed (alwaysRun).
for diag_name, diag_desc, diag_cmd in (
    ("df", "Reporting disk usage", ["df", "-h", "."]),
    ("du", "Reporting estimated file space usage", ["du", "-sh", "."]),
):
    factory.addStep(ShellCommand(
        name = diag_name,
        description = diag_desc,
        command = diag_cmd,
        # LC_ALL=C for stable, untranslated tool output
        env = {'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

factory.addStep(ShellCommand(
    name = "ccachestat",
    description = "Reporting ccache stats",
    command = ["ccache", "-s"],
    # ccache may be the in-tree copy, so staging_dir/host/bin is appended to PATH
    env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
    want_stderr = False,
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
    alwaysRun = True,
))
1378
# Register the per-target builder, a matching Triggerable scheduler, and a step
# on the force-build factory that fires this target's scheduler when selected.
c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
force_factory.addStep(steps.Trigger(
    name = "trigger_%s" % target,
    description = "Triggering %s build" % target,
    schedulerNames = [ "trigger_%s" % target ],
    # forward the force-build reason/tag into the triggered build's properties
    set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
    doStepIf = IsTargetSelected(target)
))
1389
1390
1391 ####### STATUS TARGETS
1392
# Status targets: where build results are reported. In modern Buildbot this is
# the 'www' web UI configured below plus reporters appended to c['services']
# (here an optional IRC bot); the old buildbot/status/*.py modules no longer exist.
1396
# Web UI: enabled only when the ini file provides a bind port. If admin
# credentials are also configured, gate all control endpoints behind them.
if ini.has_option("phase1", "status_bind"):
    www = {
        'port': ini.get("phase1", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
        admin_user = ini.get("phase1", "status_user")
        www['auth'] = util.UserPasswordAuth([
            (admin_user, ini.get("phase1", "status_password"))
        ])
        www['authz'] = util.Authz(
            allowRules = [ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers = [ util.RolesFromUsername(roles=["admins"], usernames=[admin_user]) ]
        )

    c['www'] = www
1415
c['services'] = []

# Optional IRC notifier: requires host, nickname and channel in the ini file.
# Reports exceptions, breakages and recoveries to the configured channel.
if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
    host = ini.get("irc", "host")
    nick = ini.get("irc", "nickname")
    channel = ini.get("irc", "channel")
    # defaults: plain IRC port, no server password
    port = ini.getint("irc", "port") if ini.has_option("irc", "port") else 6667
    password = ini.get("irc", "password") if ini.has_option("irc", "password") else None

    c['services'].append(reporters.IRC(host, nick,
        port = port,
        password = password,
        channels = [ channel ],
        notify_events = [ 'exception', 'problem', 'recovery' ]
    ))
1438
# Map change-source repository URLs onto commit web links (cgit on
# git.openwrt.org); \1 captures the repository name from the clone URL.
repo_url_patterns = [
    r'https://git.openwrt.org/openwrt/(.*).git'
]
c['revlink'] = util.RevlinkMatch(repo_url_patterns,
    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1443
####### DB URL

# Buildbot's state database. The sqlite default is adequate for all but the
# largest installations, so it is left as-is here.
c['db'] = dict(db_url = "sqlite:///state.sqlite")

# Opt out of sending anonymous usage statistics to the buildbot project.
c['buildbotNetUsageData'] = None