2f4337f72559e9e163f59eefd805a9fc50396d62
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Leave a PID file behind for the init/maintenance scripts, but never
# clobber one written by a previous (possibly still running) instance.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Refuse to start unless every mandatory section is present.
if any(section not in ini for section in ("general", "phase1", "rsync")):
    raise ValueError("Fix your configuration")

inip1 = ini['phase1']
58
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

# Taken from the [phase1] section (not [general]), since each phase runs
# its own web UI.
c['buildbotURL'] = inip1.get("buildbot_url")
79
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.
#
# Per-worker options read from "[worker ...]" ini sections:
#   builds    - max concurrent builds (default 1)
#   cleanup   - whether full cleanup may be requested for this worker
#   dl_lock   - name of a master-side lock serializing downloads
#   ul_lock   - name of a master-side lock serializing uploads
#   shared_wd - share one work directory between builders (requires builds=1)

c['workers'] = []
NetLocks = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                if max_builds == 1:
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUG FIX: this previously read the "dl_lock" option (copy/paste
                # error), so a worker's upload lock silently aliased its download
                # lock and any dedicated ul_lock setting was ignored.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# PB port can be either a numeric port or a connection string
# (value comes from the ini as a string; 9989 is the fallback).
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data: prune build logs older than three days,
# running the janitor once a day (hour=6).
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
133
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder
    # (descending complete_at, limit 1).
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        # No completed request at all -> implicit None.
        return

    complete_at = completed[0]['complete_at']

    # Most recently started build for this builder; if it finished even
    # later than the newest request above, prefer its timestamp.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
171
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Builder has a build in flight (or a zombie left over from reconfig).
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its newest completion timestamp.
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders get the oldest possible timestamp so they
        # are scheduled first ...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ... while busy builders get the newest possible timestamp so
        # idle builders are preferred.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Renamed from `results`: the old local shadowed the
    # buildbot.process.results module imported at the top of this file.
    bldr_infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    bldr_infos.sort(key=bldr_sort)

    for complete_at, bldr in bldr_infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in bldr_infos]
208
c['prioritizeBuilders'] = prioritizeBuilders

####### CHANGESOURCES

# Master-side working directory and helper-script checkout.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

# Days after which a worker build tree is expired (0 disables expiry);
# seed contents prepended to each build's .config.
tree_expire = inip1.getint("expire", 0)
config_seed = inip1.get("config_seed", "")

# NOTE(review): the startup validation above only checks the "general",
# "phase1" and "rsync" sections — a missing [repo] section raises KeyError here.
repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# rsync-daemon style URLs ("host::module" or "rsync://") additionally get a
# connection timeout.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]

usign_key = None
# e.g. branch "openwrt-21.02" -> "untrusted comment: Openwrt 21.02 key"
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

# Whether per-kernel-version kmod archives are built and published.
enable_kmod_archive = inip1.getboolean("kmod_archive", False)
244
245
# find targets
#
# Keep a shallow clone of the source tree on the master and ask its
# dump-target-info.pl script for the list of "target/subtarget" names;
# one builder is created per entry.
targets = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Each output line is "<target/subtarget> <extra info...>"; keep the first field.
# Iterating the pipe replaces the old while/readline loop.
for line in findtargets.stdout:
    targets.append(line.decode().strip().split(' ')[0])

# Close the pipe and reap the child so it doesn't linger as a zombie
# (the original never called wait()).
findtargets.stdout.close()
findtargets.wait()
264
265
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Poll the configured source repository for new
# commits on repo_branch every 5 minutes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
274
275 ####### SCHEDULERS
276
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
279
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter offering the release tags that match the
    branch's base version (e.g. v21.02.* tags for an "openwrt-21.02" branch),
    queried live from the remote repository.
    """
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        """Recompute the selectable tag list from `git ls-remote --tags`."""
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            # Compile once instead of re-compiling on every output line.
            tagpat = re.compile(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$')

            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            for line in findtags.stdout:
                tagver = tagpat.search(line.decode().strip())
                if tagver and tagver[1].startswith(basever[1]):
                    taglist.append(tagver[1])

            # Reap the child process; the original left a zombie behind on
            # every read of this property.
            findtags.stdout.close()
            findtags.wait()

            # Newest first; final releases get a '-z' sort suffix so they
            # rank above their release candidates (1.0.0 > 1.0.0-rc2).
            taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
            # Empty first entry = "no tag selected".
            taglist.insert(0, '')

        self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        """Validate the submitted value against the current choice list."""
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
321
c['schedulers'] = []
# Kick off every target builder once repo_branch has been quiet for 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))
328
# Manual builds via the web UI: attach to the placeholder "00_force_build"
# builder and carry the target/tag options as a nested property.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Pin all codebase fields; forced builds always use the configured repo.
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                # "all" or one specific target/subtarget.
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                # Optional release tag, choices computed by TagChoiceParameter.
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
375
376 ####### BUILDERS
377
378 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
379 # what steps, and which workers can execute them. Note that any particular build will
380 # only take place on one worker.
381
def IsSharedWorkdir(step):
    """True when this build's worker was configured with a shared work directory."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
384
def IsCleanupRequested(step):
    """True when the worker asked for a full cleanup of old build trees.

    Shared-workdir workers never clean up (the directory is reused across
    builders). Collapses the original if/else True/False into bool().
    """
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))
393
def IsExpireRequested(step):
    """Expire (age-out) the build tree only for non-shared workdirs that are
    not already doing a full cleanup."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
399
def IsTaggingRequested(step):
    """True when the forced build supplied a well-formed release tag
    ("x.y.z" or "x.y.z-rcN"); collapses the verbose if/else into bool()."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
406
def IsNoMasterBuild(step):
    """True when this master tracks a release branch rather than "master"."""
    is_master = (repo_branch == "master")
    return not is_master
409
def GetBaseVersion(branch=None):
    """Return the numeric base version encoded in a branch name.

    "openwrt-21.02" -> "21.02"; anything not matching "<name>-<major>.<minor>"
    yields "master". *branch* defaults to the configured repo_branch, keeping
    the original zero-argument call sites working.
    """
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    else:
        return "master"
415
@properties.renderer
def GetVersionPrefix(props):
    """Upload-directory prefix: "<tag>/" for tagged release builds,
    "<basever>-SNAPSHOT/" on release branches, "" on master."""
    tag = props["tag"] if props.hasProperty("tag") else None
    if tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag):
        return "%s/" % tag
    basever = GetBaseVersion()
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
425
@properties.renderer
def GetNumJobs(props):
    """Parallel make jobs for this build: the worker's CPU count divided
    evenly among its concurrent build slots; "1" when unknown."""
    if props.hasProperty("max_builds") and props.hasProperty("nproc"):
        return str(int(props["nproc"]) // props["max_builds"])
    return "1"
432
@properties.renderer
def GetCCache(props):
    """The detected ccache command, or "" when ccache is unavailable."""
    cmd = props["ccache_command"] if props.hasProperty("ccache_command") else None
    return cmd if cmd and "ccache" in cmd else ""
439
def GetNextBuild(builder, requests):
    """Pick the next build request: tagged (release) requests jump the queue,
    otherwise the oldest pending request wins."""
    for req in requests:
        if req.properties and req.properties.hasProperty("tag"):
            return req

    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
448
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict passed to build steps.

    CCC/CCXX always carry the detected host compilers; CC/CXX either point at
    the per-build ccache wrapper scripts (tryccache=True) or alias CCC/CCXX
    directly. Entries in *overrides* win over the generated ones.
    """
    ccc = Interpolate("%(prop:cc_command:-gcc)s")
    ccxx = Interpolate("%(prop:cxx_command:-g++)s")
    if tryccache:
        env = {
            'CCC': ccc,
            'CCXX': ccxx,
            'CC': Interpolate("%(prop:builddir)s/ccache_cc.sh"),
            'CXX': Interpolate("%(prop:builddir)s/ccache_cxx.sh"),
            'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
        }
    else:
        env = {
            'CCC': ccc,
            'CCXX': ccxx,
            'CC': ccc,
            'CXX': ccxx,
            'CCACHE': '',
        }
    if overrides is not None:
        env.update(overrides)
    return env
465
@properties.renderer
def NetLockDl(props):
    """Exclusive access to this worker's download network lock, if configured."""
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
        if lock is not None:
            return [lock.access('exclusive')]
    return []
475
@properties.renderer
def NetLockUl(props):
    """Exclusive access to this worker's upload network lock, if configured."""
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
        if lock is not None:
            return [lock.access('exclusive')]
    return []
485
@util.renderer
def TagPropertyValue(props):
    """Extract the "tag" entry from the force-build "options" property,
    or None when absent."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        # isinstance instead of `type(...) is dict` so dict subclasses work too.
        if isinstance(options, dict):
            return options.get("tag")
    return None
493
def IsTargetSelected(target):
    """Build a doStepIf predicate that is True when the forced build selected
    *target* (or "all"); non-forced builds (no usable options) always pass."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        if type(options) is dict:
            selected = options.get("target", "all")
            return selected in ("all", target)
        return True

    return CheckTargetProperty
508
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64-encoded secret key.

    Returns the two-line public key text (the comment line with "secret key"
    rewritten to "public key", then the base64 key material), or None when
    *seckey* is not decodable base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # Narrowed from a bare except: b64decode raises binascii.Error
        # (a ValueError) on bad input, TypeError on non-str/bytes.
        return None

    # Slice out the public portion of the secret key (assumed usign layout,
    # mirrors the original offsets — 2 + 8 bytes at 32 + tail at 72).
    pubdata = seckey[0:2] + seckey[32:40] + seckey[72:]
    # BUG FIX: b64encode() returns bytes; without .decode() the generated
    # key file contained a literal "b'...'" repr instead of clean base64.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
                           base64.b64encode(pubdata).decode())
517
518
c['builders'] = []

# Worker-scoped lock: serializes dl/ population so concurrent builds on the
# same worker don't race the shared download cache.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

# Placeholder builder the ForceScheduler attaches to; its factory is empty
# at this point.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
534
535 for target in targets:
536 ts = target.split('/')
537
538 factory = BuildFactory()
539
540 # setup shared work directory if required
541 factory.addStep(ShellCommand(
542 name = "sharedwd",
543 description = "Setting up shared work directory",
544 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
545 workdir = ".",
546 haltOnFailure = True,
547 doStepIf = IsSharedWorkdir))
548
549 # find number of cores
550 factory.addStep(SetPropertyFromCommand(
551 name = "nproc",
552 property = "nproc",
553 description = "Finding number of CPUs",
554 command = ["nproc"]))
555
556 # find gcc and g++ compilers
557 factory.addStep(FileDownload(
558 name = "dlfindbinpl",
559 mastersrc = scripts_dir + '/findbin.pl',
560 workerdest = "../findbin.pl",
561 mode = 0o755))
562
563 factory.addStep(SetPropertyFromCommand(
564 name = "gcc",
565 property = "cc_command",
566 description = "Finding gcc command",
567 command = [
568 "../findbin.pl", "gcc", "", "",
569 ],
570 haltOnFailure = True))
571
572 factory.addStep(SetPropertyFromCommand(
573 name = "g++",
574 property = "cxx_command",
575 description = "Finding g++ command",
576 command = [
577 "../findbin.pl", "g++", "", "",
578 ],
579 haltOnFailure = True))
580
581 # see if ccache is available
582 factory.addStep(SetPropertyFromCommand(
583 property = "ccache_command",
584 command = ["which", "ccache"],
585 description = "Testing for ccache command",
586 haltOnFailure = False,
587 flunkOnFailure = False,
588 warnOnFailure = False,
589 ))
590
591 # expire tree if needed
592 if tree_expire > 0:
593 factory.addStep(FileDownload(
594 name = "dlexpiresh",
595 doStepIf = IsExpireRequested,
596 mastersrc = scripts_dir + '/expire.sh',
597 workerdest = "../expire.sh",
598 mode = 0o755))
599
600 factory.addStep(ShellCommand(
601 name = "expire",
602 description = "Checking for build tree expiry",
603 command = ["./expire.sh", str(tree_expire)],
604 workdir = ".",
605 haltOnFailure = True,
606 doStepIf = IsExpireRequested,
607 timeout = 2400))
608
609 # cleanup.sh if needed
610 factory.addStep(FileDownload(
611 name = "dlcleanupsh",
612 mastersrc = scripts_dir + '/cleanup.sh',
613 workerdest = "../cleanup.sh",
614 mode = 0o755,
615 doStepIf = IsCleanupRequested))
616
617 factory.addStep(ShellCommand(
618 name = "cleanold",
619 description = "Cleaning previous builds",
620 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
621 workdir = ".",
622 haltOnFailure = True,
623 doStepIf = IsCleanupRequested,
624 timeout = 2400))
625
626 factory.addStep(ShellCommand(
627 name = "cleanup",
628 description = "Cleaning work area",
629 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
630 workdir = ".",
631 haltOnFailure = True,
632 doStepIf = IsCleanupRequested,
633 timeout = 2400))
634
635 # Workaround bug when switching from a checked out tag back to a branch
636 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
637 factory.addStep(ShellCommand(
638 name = "gitcheckout",
639 description = "Ensure that Git HEAD is sane",
640 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
641 haltOnFailure = True))
642
643 # check out the source
644 # Git() runs:
645 # if repo doesn't exist: 'git clone repourl'
646 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
647 # 'git fetch -t repourl branch; git reset --hard revision'
648 factory.addStep(Git(
649 name = "git",
650 repourl = repo_url,
651 branch = repo_branch,
652 mode = 'full',
653 method = Interpolate("%(prop:do_cleanup:#?|fresh|clean)s"),
654 locks = NetLockDl,
655 haltOnFailure = True,
656 ))
657
658 # update remote refs
659 factory.addStep(ShellCommand(
660 name = "fetchrefs",
661 description = "Fetching Git remote refs",
662 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
663 haltOnFailure = True
664 ))
665
666 # switch to tag
667 factory.addStep(ShellCommand(
668 name = "switchtag",
669 description = "Checking out Git tag",
670 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
671 haltOnFailure = True,
672 doStepIf = IsTaggingRequested
673 ))
674
675 # Verify that Git HEAD points to a tag or branch
676 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
677 factory.addStep(ShellCommand(
678 name = "gitverify",
679 description = "Ensure that Git HEAD is pointing to a branch or tag",
680 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
681 haltOnFailure = True))
682
683 factory.addStep(ShellCommand(
684 name = "rmtmp",
685 description = "Remove tmp folder",
686 command=["rm", "-rf", "tmp/"]))
687
688 # feed
689 factory.addStep(ShellCommand(
690 name = "rmfeedlinks",
691 description = "Remove feed symlinks",
692 command=["rm", "-rf", "package/feeds/"]))
693
694 factory.addStep(StringDownload(
695 name = "ccachecc",
696 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
697 workerdest = "../ccache_cc.sh",
698 mode = 0o755,
699 ))
700
701 factory.addStep(StringDownload(
702 name = "ccachecxx",
703 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
704 workerdest = "../ccache_cxx.sh",
705 mode = 0o755,
706 ))
707
708 # feed
709 factory.addStep(ShellCommand(
710 name = "updatefeeds",
711 description = "Updating feeds",
712 command=["./scripts/feeds", "update"],
713 env = MakeEnv(tryccache=True),
714 haltOnFailure = True,
715 locks = NetLockDl,
716 ))
717
718 # feed
719 factory.addStep(ShellCommand(
720 name = "installfeeds",
721 description = "Installing feeds",
722 command=["./scripts/feeds", "install", "-a"],
723 env = MakeEnv(tryccache=True),
724 haltOnFailure = True
725 ))
726
727 # seed config
728 if config_seed is not None:
729 factory.addStep(StringDownload(
730 name = "dlconfigseed",
731 s = config_seed + '\n',
732 workerdest = ".config",
733 mode = 0o644
734 ))
735
736 # configure
737 factory.addStep(ShellCommand(
738 name = "newconfig",
739 description = "Seeding .config",
740 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
741 ))
742
743 factory.addStep(ShellCommand(
744 name = "delbin",
745 description = "Removing output directory",
746 command = ["rm", "-rf", "bin/"]
747 ))
748
749 factory.addStep(ShellCommand(
750 name = "defconfig",
751 description = "Populating .config",
752 command = ["make", "defconfig"],
753 env = MakeEnv()
754 ))
755
756 # check arch
757 factory.addStep(ShellCommand(
758 name = "checkarch",
759 description = "Checking architecture",
760 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
761 logEnviron = False,
762 want_stdout = False,
763 want_stderr = False,
764 haltOnFailure = True
765 ))
766
767 # find libc suffix
768 factory.addStep(SetPropertyFromCommand(
769 name = "libc",
770 property = "libc",
771 description = "Finding libc suffix",
772 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
773
774 # install build key
775 if usign_key is not None:
776 factory.addStep(StringDownload(
777 name = "dlkeybuildpub",
778 s = UsignSec2Pub(usign_key, usign_comment),
779 workerdest = "key-build.pub",
780 mode = 0o600,
781 ))
782
783 factory.addStep(StringDownload(
784 name = "dlkeybuild",
785 s = "# fake private key",
786 workerdest = "key-build",
787 mode = 0o600,
788 ))
789
790 factory.addStep(StringDownload(
791 name = "dlkeybuilducert",
792 s = "# fake certificate",
793 workerdest = "key-build.ucert",
794 mode = 0o600,
795 ))
796
797 # prepare dl
798 factory.addStep(ShellCommand(
799 name = "dldir",
800 description = "Preparing dl/",
801 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
802 logEnviron = False,
803 want_stdout = False
804 ))
805
806 # prepare tar
807 factory.addStep(ShellCommand(
808 name = "dltar",
809 description = "Building and installing GNU tar",
810 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
811 env = MakeEnv(tryccache=True),
812 haltOnFailure = True
813 ))
814
815 # populate dl
816 factory.addStep(ShellCommand(
817 name = "dlrun",
818 description = "Populating dl/",
819 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
820 env = MakeEnv(),
821 logEnviron = False,
822 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
823 ))
824
825 factory.addStep(ShellCommand(
826 name = "cleanbase",
827 description = "Cleaning base-files",
828 command=["make", "package/base-files/clean", "V=s"]
829 ))
830
831 # build
832 factory.addStep(ShellCommand(
833 name = "tools",
834 description = "Building and installing tools",
835 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
836 env = MakeEnv(tryccache=True),
837 haltOnFailure = True
838 ))
839
840 factory.addStep(ShellCommand(
841 name = "toolchain",
842 description = "Building and installing toolchain",
843 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
844 env = MakeEnv(),
845 haltOnFailure = True
846 ))
847
848 factory.addStep(ShellCommand(
849 name = "kmods",
850 description = "Building kmods",
851 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
852 env = MakeEnv(),
853 haltOnFailure = True
854 ))
855
856 # find kernel version
857 factory.addStep(SetPropertyFromCommand(
858 name = "kernelversion",
859 property = "kernelversion",
860 description = "Finding the effective Kernel version",
861 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
862 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
863 ))
864
865 factory.addStep(ShellCommand(
866 name = "pkgclean",
867 description = "Cleaning up package build",
868 command=["make", "package/cleanup", "V=s"]
869 ))
870
871 factory.addStep(ShellCommand(
872 name = "pkgbuild",
873 description = "Building packages",
874 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
875 env = MakeEnv(),
876 haltOnFailure = True
877 ))
878
879 factory.addStep(ShellCommand(
880 name = "pkginstall",
881 description = "Installing packages",
882 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
883 env = MakeEnv(),
884 haltOnFailure = True
885 ))
886
887 factory.addStep(ShellCommand(
888 name = "pkgindex",
889 description = "Indexing packages",
890 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
891 env = MakeEnv(),
892 haltOnFailure = True
893 ))
894
895 factory.addStep(ShellCommand(
896 name = "images",
897 description = "Building and installing images",
898 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
899 env = MakeEnv(),
900 haltOnFailure = True
901 ))
902
903 factory.addStep(ShellCommand(
904 name = "buildinfo",
905 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
906 command = "make -j1 buildinfo V=s || true",
907 env = MakeEnv(),
908 haltOnFailure = True
909 ))
910
911 factory.addStep(ShellCommand(
912 name = "json_overview_image_info",
913 description = "Generate profiles.json in target folder",
914 command = "make -j1 json_overview_image_info V=s || true",
915 env = MakeEnv(),
916 haltOnFailure = True
917 ))
918
919 factory.addStep(ShellCommand(
920 name = "checksums",
921 description = "Calculating checksums",
922 command=["make", "-j1", "checksum", "V=s"],
923 env = MakeEnv(),
924 haltOnFailure = True
925 ))
926
927 if enable_kmod_archive:
928 factory.addStep(ShellCommand(
929 name = "kmoddir",
930 description = "Creating kmod directory",
931 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
932 haltOnFailure = True
933 ))
934
935 factory.addStep(ShellCommand(
936 name = "kmodprepare",
937 description = "Preparing kmod archive",
938 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
939 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
940 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
941 haltOnFailure = True
942 ))
943
944 factory.addStep(ShellCommand(
945 name = "kmodindex",
946 description = "Indexing kmod archive",
947 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
948 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
949 env = MakeEnv(),
950 haltOnFailure = True
951 ))
952
953 # sign
954 if ini.has_option("gpg", "key") or usign_key is not None:
955 factory.addStep(MasterShellCommand(
956 name = "signprepare",
957 description = "Preparing temporary signing directory",
958 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
959 haltOnFailure = True
960 ))
961
962 factory.addStep(ShellCommand(
963 name = "signpack",
964 description = "Packing files to sign",
965 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
966 haltOnFailure = True
967 ))
968
969 factory.addStep(FileUpload(
970 workersrc = "sign.tar.gz",
971 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
972 haltOnFailure = True
973 ))
974
975 factory.addStep(MasterShellCommand(
976 name = "signfiles",
977 description = "Signing files",
978 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
979 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
980 haltOnFailure = True
981 ))
982
983 factory.addStep(FileDownload(
984 name = "dlsigntargz",
985 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
986 workerdest = "sign.tar.gz",
987 haltOnFailure = True
988 ))
989
990 factory.addStep(ShellCommand(
991 name = "signunpack",
992 description = "Unpacking signed files",
993 command = ["tar", "-xzf", "sign.tar.gz"],
994 haltOnFailure = True
995 ))
996
	# upload
	#
	# Build the remote directory layout locally under tmp/upload/ and push
	# it to the download server first, so the per-file uploads further down
	# find their destination directories already in place.
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	# Non-master builds get a "packages" symlink pointing at the shared
	# ../packages-<basever> feed directory (skipped on master builds via
	# IsNoMasterBuild).
	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
		locks = NetLockUl,
	))
1030
	# download remote sha256sums to 'target-sha256sums'
	#
	# Best effort: a missing remote sha256sums (e.g. the first build of a
	# target) must not affect the build, hence all failure flags are off.
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload
	#
	# sha2rsync.pl compares the remote checksums against the locally built
	# sha256sums and writes 'rsynclist' — presumably only the new/changed
	# files; confirm against scripts/sha2rsync.pl.
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	# rsync.sh wraps rsync for the upload steps below — see
	# scripts/rsync.sh for its exact behavior (not visible here).
	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))
1064
	# upload new files and update existing ones
	#
	# Only files listed in 'rsynclist' are transferred; /kmods/ is excluded
	# here because it is mirrored separately below. The per-target
	# partial-dir keeps interrupted transfers of different targets apart.
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally
	#
	# "--delete --existing --ignore-existing" makes rsync transfer nothing
	# and only remove remote files that no longer exist locally.
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))

	if enable_kmod_archive:
		# Mirror the per-kernel-version kmod directory wholesale
		# (--delete keeps the remote copy exact).
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
			         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1102
	if rsync_src_url is not None:
		# Upload source tarballs fetched during this build to the source
		# mirror: regular files in dl/, non-empty, newer than .config,
		# excluding dotfiles and *.hash / *.dl bookkeeping files.
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# --size-only skips re-uploading same-size files (presumably safe
		# because release archives are immutable — confirm). The partial-dir
		# includes the worker name since multiple workers share this tree.
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
			        [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1121
1122 factory.addStep(ShellCommand(
1123 name = "df",
1124 description = "Reporting disk usage",
1125 command=["df", "-h", "."],
1126 env={'LC_ALL': 'C'},
1127 haltOnFailure = False,
1128 flunkOnFailure = False,
1129 warnOnFailure = False,
1130 alwaysRun = True
1131 ))
1132
1133 factory.addStep(ShellCommand(
1134 name = "du",
1135 description = "Reporting estimated file space usage",
1136 command=["du", "-sh", "."],
1137 env={'LC_ALL': 'C'},
1138 haltOnFailure = False,
1139 flunkOnFailure = False,
1140 warnOnFailure = False,
1141 alwaysRun = True
1142 ))
1143
1144 factory.addStep(ShellCommand(
1145 name = "ccachestat",
1146 description = "Reporting ccache stats",
1147 command=["ccache", "-s"],
1148 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1149 want_stderr = False,
1150 haltOnFailure = False,
1151 flunkOnFailure = False,
1152 warnOnFailure = False,
1153 alwaysRun = True,
1154 ))
1155
	# Register the per-target builder; GetNextBuild picks the next build to
	# run from the pending queue.
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	# One Triggerable scheduler per target so the force build factory can
	# start individual targets; the trigger step only fires when this
	# target was selected (IsTargetSelected).
	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		doStepIf = IsTargetSelected(target)
	))
1166
1167
####### STATUS TARGETS

# Build results are published through the reporting facilities configured
# below: the 'www' web UI (waterfall, console and grid views) and the
# 'services' reporter list (e.g. the IRC bot).
1173
# Web UI: only enabled when a bind address/port is configured.
if "status_bind" in inip1:
	www = {
		'port': inip1.get("status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	# With credentials configured, protect all control endpoints behind a
	# single "admins" role granted to the configured user.
	if "status_user" in inip1 and "status_password" in inip1:
		www['auth'] = util.UserPasswordAuth([
			(inip1.get("status_user"), inip1.get("status_password"))
		])
		www['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
		)

	c['www'] = www
1192
c['services'] = []
# IRC notifications: host, nickname and channel are mandatory; port
# defaults to 6667 and the password is optional.
if ini.has_section("irc"):
	iniirc = ini['irc']
	irc_host = iniirc.get("host", None)
	irc_port = iniirc.getint("port", 6667)
	irc_chan = iniirc.get("channel", None)
	irc_nick = iniirc.get("nickname", None)
	irc_pass = iniirc.get("password", None)

	if irc_host and irc_nick and irc_chan:
		c['services'].append(reporters.IRC(
			irc_host, irc_nick,
			port = irc_port,
			password = irc_pass,
			channels = [ irc_chan ],
			notify_events = [ 'exception', 'problem', 'recovery' ]
		))
1211
# Turn openwrt.git revision ids in the web UI into gitweb commit links.
revlink_repos = [ r'https://git.openwrt.org/openwrt/(.*).git' ]
c['revlink'] = util.RevlinkMatch(revlink_repos,
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1216
####### DB URL

# Buildbot state storage; the sqlite default is fine for all but the
# largest installations.
c['db'] = dict(db_url = "sqlite:///state.sqlite")

# Opt out of reporting anonymous usage statistics to buildbot.net.
c['buildbotNetUsageData'] = None