phase1: replace 'repo_branch' with 'branch' prop in factory
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is the phase1 buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
55 raise ValueError("Fix your configuration")
56
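# A minimal config.ini sketch, with section and option names taken from the
# lookups in this file; all values below are placeholders, not defaults:
#
#   [general]
#   title = OpenWrt buildbot
#   title_url = https://example.org/
#   workdir = .
#
#   [phase1]
#   buildbot_url = https://example.org/buildbot/
#   status_bind = 8010
#   port = 9989
#   kmod_archive = true
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#   branch = master
#
#   [rsync]
#   binary_url = user@host::bin
#   binary_password = secret
#   source_url = user@host::src
#   source_password = secret
#
#   [worker 1]
#   name = example-worker
#   password = secret
#   phase = 1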
57 inip1 = ini['phase1']
58
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 config_seed = inip1.get("config_seed", "")
64
65 repo_url = ini['repo'].get("url")
66 repo_branch = ini['repo'].get("branch", "master")
67
68 rsync_bin_url = ini['rsync'].get("binary_url")
69 rsync_bin_key = ini['rsync'].get("binary_password")
70 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
71
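# Note: --contimeout only applies to connections to an rsync daemon, so it is
# only added when the URL looks like a daemon URL (host::module or rsync://...).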
72 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
73 rsync_bin_defopts += ["--contimeout=20"]
74
75 rsync_src_url = ini['rsync'].get("source_url")
76 rsync_src_key = ini['rsync'].get("source_password")
77 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
78
79 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
80 rsync_src_defopts += ["--contimeout=20"]
81
82 usign_key = None
83 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
84
85 if ini.has_section("usign"):
86 usign_key = ini['usign'].get("key")
87 usign_comment = ini['usign'].get("comment", usign_comment)
88
89 enable_kmod_archive = inip1.getboolean("kmod_archive", False)
90
91 # PB port can be either a numeric port or a connection string
92 pb_port = inip1.get("port") or 9989
93
94 # This is the dictionary that the buildmaster pays attention to. We also use
95 # a shorter alias to save typing.
96 c = BuildmasterConfig = {}
97
98 ####### PROJECT IDENTITY
99
100 # the 'title' string will appear at the top of this buildbot
101 # installation's web UI home page (linked to the 'titleURL') and is
102 # embedded in the page titles of the web interface.
103
104 c['title'] = ini['general'].get("title")
105 c['titleURL'] = ini['general'].get("title_url")
106
107 # the 'buildbotURL' string should point to the location where the buildbot's
108 # internal web server is visible. This typically uses the port number
109 # set in the 'status_bind' option, but with an externally-visible host
110 # name which the buildbot cannot figure out
111 # without some help.
112
113 c['buildbotURL'] = inip1.get("buildbot_url")
114
115 ####### BUILDWORKERS
116
117 # The 'workers' list defines the set of recognized buildworkers. Each element is
118 # a Worker object, specifying a unique worker name and password. The same
119 # worker name and password must be configured on the worker.
120
121 c['workers'] = []
122 NetLocks = dict()
123
124 for section in ini.sections():
125 if section.startswith("worker "):
126 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
127 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
128 sl_props = { 'dl_lock':None, 'ul_lock':None }
129 name = ini.get(section, "name")
130 password = ini.get(section, "password")
131 if ini.has_option(section, "dl_lock"):
132 lockname = ini.get(section, "dl_lock")
133 sl_props['dl_lock'] = lockname
134 if lockname not in NetLocks:
135 NetLocks[lockname] = locks.MasterLock(lockname)
136 if ini.has_option(section, "ul_lock"):
137 lockname = ini.get(section, "ul_lock")
138 sl_props['ul_lock'] = lockname
139 if lockname not in NetLocks:
140 NetLocks[lockname] = locks.MasterLock(lockname)
141 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
142
143 c['protocols'] = {'pb': {'port': pb_port}}
144
145 # coalesce builds
146 c['collapseRequests'] = True
147
148 # Reduce amount of backlog data
149 c['configurators'] = [util.JanitorConfigurator(
150 logHorizon=timedelta(days=3),
151 hour=6,
152 )]
153
154 @defer.inlineCallbacks
155 def getNewestCompleteTime(bldr):
156 """Returns the complete_at of the latest completed and not SKIPPED
157 build request for this builder, or None if there are no such build
158 requests. We need to filter out SKIPPED requests because we're
159 using collapseRequests=True which is unfortunately marking all
160 previous requests as complete when new buildset is created.
161
162 @returns: datetime instance or None, via Deferred
163 """
164
165 bldrid = yield bldr.getBuilderId()
166 completed = yield bldr.master.data.get(
167 ('builders', bldrid, 'buildrequests'),
168 [
169 resultspec.Filter('complete', 'eq', [True]),
170 resultspec.Filter('results', 'ne', [results.SKIPPED]),
171 ],
172 order=['-complete_at'], limit=1)
173 if not completed:
174 return
175
176 complete_at = completed[0]['complete_at']
177
178 last_build = yield bldr.master.data.get(
179 ('builds', ),
180 [
181 resultspec.Filter('builderid', 'eq', [bldrid]),
182 ],
183 order=['-started_at'], limit=1)
184
185 if last_build and last_build[0]:
186 last_complete_at = last_build[0]['complete_at']
187 if last_complete_at and (last_complete_at > complete_at):
188 return last_complete_at
189
190 return complete_at
191
192 @defer.inlineCallbacks
193 def prioritizeBuilders(master, builders):
194 """Returns sorted list of builders by their last timestamp of completed and
195 not skipped build.
196
197 @returns: list of sorted builders
198 """
199
200 def is_building(bldr):
201 return bool(bldr.building) or bool(bldr.old_building)
202
203 def bldr_info(bldr):
204 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
205 d.addCallback(lambda complete_at: (complete_at, bldr))
206 return d
207
208 def bldr_sort(item):
209 (complete_at, bldr) = item
210
211 if not complete_at:
212 date = datetime.min
213 complete_at = date.replace(tzinfo=tzutc())
214
215 if is_building(bldr):
216 date = datetime.max
217 complete_at = date.replace(tzinfo=tzutc())
218
219 return (complete_at, bldr.name)
220
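# Ascending sort on (complete_at, name): builders that never completed a build
# (datetime.min) end up first, idle builders follow ordered by how long ago
# they last finished, and builders that are currently building (datetime.max)
# are pushed to the end of the priority list.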
221 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
222 results.sort(key=bldr_sort)
223
224 for r in results:
225 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
226
227 return [r[1] for r in results]
228
229 c['prioritizeBuilders'] = prioritizeBuilders
230
231 ####### CHANGESOURCES
232
233
234 # find targets
235 targets = [ ]
236
237 def populateTargets():
238 sourcegit = work_dir + '/source.git'
239 if os.path.isdir(sourcegit):
240 subprocess.call(["rm", "-rf", sourcegit])
241
242 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, sourcegit])
243
244 os.makedirs(sourcegit + '/tmp', exist_ok=True)
245 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
246 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
247
248 while True:
249 line = findtargets.stdout.readline()
250 if not line:
251 break
252 ta = line.decode().strip().split(' ')
253 targets.append(ta[0])
254
255 subprocess.call(["rm", "-rf", sourcegit])
256
257 populateTargets()
258
259 # the 'change_source' setting tells the buildmaster how it should find out
260 # about source code changes. Here we point to the buildbot clone of pyflakes.
261
262 c['change_source'] = []
263 c['change_source'].append(GitPoller(
264 repo_url,
265 workdir=work_dir+'/work.git', branch=repo_branch,
266 pollinterval=300))
267
268 ####### SCHEDULERS
269
270 # Configure the Schedulers, which decide how to react to incoming changes. In this
271 # case, just kick off a 'basebuild' build
272
273 class TagChoiceParameter(BaseParameter):
274 spec_attributes = ["strict", "choices"]
275 type = "list"
276 strict = True
277
278 def __init__(self, name, label=None, **kw):
279 super().__init__(name, label, **kw)
280 self._choice_list = []
281
282 @property
283 def choices(self):
284 taglist = []
285 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
286
287 if basever:
288 findtags = subprocess.Popen(
289 ['git', 'ls-remote', '--tags', repo_url],
290 stdout = subprocess.PIPE)
291
292 while True:
293 line = findtags.stdout.readline()
294
295 if not line:
296 break
297
298 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
299
300 if tagver and tagver[1].find(basever[1]) == 0:
301 taglist.append(tagver[1])
302
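# With reverse=True the key below sorts newest versions first and, by
# suffixing plain release tags with '-z', lists a final release (e.g. x.y.z)
# ahead of its release candidates (x.y.z-rcN).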
303 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
304 taglist.insert(0, '')
305
306 self._choice_list = taglist
307
308 return self._choice_list
309
310 def parse_from_arg(self, s):
311 if self.strict and s not in self._choice_list:
312 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
313 return s
314
315 c['schedulers'] = []
316 c['schedulers'].append(SingleBranchScheduler(
317 name = "all",
318 change_filter = filter.ChangeFilter(branch=repo_branch),
319 treeStableTimer = 60,
320 builderNames = targets))
321
322 c['schedulers'].append(ForceScheduler(
323 name = "force",
324 buttonName = "Force builds",
325 label = "Force build details",
326 builderNames = [ "00_force_build" ],
327
328 codebases = [
329 util.CodebaseParameter(
330 "",
331 label = "Repository",
332 branch = util.FixedParameter(name = "branch", default = ""),
333 revision = util.FixedParameter(name = "revision", default = ""),
334 repository = util.FixedParameter(name = "repository", default = ""),
335 project = util.FixedParameter(name = "project", default = "")
336 )
337 ],
338
339 reason = util.StringParameter(
340 name = "reason",
341 label = "Reason",
342 default = "Trigger build",
343 required = True,
344 size = 80
345 ),
346
347 properties = [
348 util.NestedParameter(
349 name="options",
350 label="Build Options",
351 layout="vertical",
352 fields=[
353 util.ChoiceStringParameter(
354 name = "target",
355 label = "Build target",
356 default = "all",
357 choices = [ "all" ] + targets
358 ),
359 TagChoiceParameter(
360 name = "tag",
361 label = "Build tag",
362 default = ""
363 )
364 ]
365 )
366 ]
367 ))
368
369 ####### BUILDERS
370
371 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
372 # what steps, and which workers can execute them. Note that any particular build will
373 # only take place on one worker.
374
375 def IsTaggingRequested(step):
376 val = step.getProperty("tag")
377 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
378 return True
379 else:
380 return False
381
382 def IsNoMasterBuild(step):
383 return step.getProperty("branch") != "master"
384
385 def GetBaseVersion(branch):
386 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
387 return branch.split('-')[1]
388 else:
389 return "master"
390
391 @properties.renderer
392 def GetVersionPrefix(props):
393 branch = props.getProperty("branch")
394 basever = GetBaseVersion(branch)
395 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
396 return "%s/" % props["tag"]
397 elif basever != "master":
398 return "%s-SNAPSHOT/" % basever
399 else:
400 return ""
401
402 def GetNextBuild(builder, requests):
403 for r in requests:
404 if r.properties and r.properties.hasProperty("tag"):
405 return r
406
407 r = requests[0]
408 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
409 return r
410
411 def MakeEnv(overrides=None, tryccache=False):
412 env = {
413 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
414 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
415 }
416 if tryccache:
417 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
418 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
419 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
420 else:
421 env['CC'] = env['CCC']
422 env['CXX'] = env['CCXX']
423 env['CCACHE'] = ''
424 if overrides is not None:
425 env.update(overrides)
426 return env
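# Usage note: MakeEnv(tryccache=True) points CC/CXX at the generated ccache
# wrapper scripts, while plain MakeEnv() reuses the detected
# cc_command/cxx_command values directly and leaves CCACHE empty.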
427
428 @properties.renderer
429 def NetLockDl(props):
430 lock = None
431 if props.hasProperty("dl_lock"):
432 lock = NetLocks[props["dl_lock"]]
433 if lock is not None:
434 return [lock.access('exclusive')]
435 else:
436 return []
437
438 @properties.renderer
439 def NetLockUl(props):
440 lock = None
441 if props.hasProperty("ul_lock"):
442 lock = NetLocks[props["ul_lock"]]
443 if lock is not None:
444 return [lock.access('exclusive')]
445 else:
446 return []
447
448 @util.renderer
449 def TagPropertyValue(props):
450 if props.hasProperty("options"):
451 options = props.getProperty("options")
452 if type(options) is dict:
453 return options.get("tag")
454 return None
455
456 def IsTargetSelected(target):
457 def CheckTargetProperty(step):
458 try:
459 options = step.getProperty("options")
460 if type(options) is dict:
461 selected_target = options.get("target", "all")
462 if selected_target != "all" and selected_target != target:
463 return False
464 except KeyError:
465 pass
466
467 return True
468
469 return CheckTargetProperty
470
471 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
472 try:
473 seckey = base64.b64decode(seckey)
474 except:
475 return None
476
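# Assumption: the slicing below follows the usign/signify secret key layout,
# reassembling a public key blob from the algorithm id (bytes 0-1), the key
# number (bytes 32-39) and the public half of the key (bytes 72 onward).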
477 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
478 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
479
480
481 c['builders'] = []
482
483 dlLock = locks.WorkerLock("worker_dl")
484
485 workerNames = [ ]
486
487 for worker in c['workers']:
488 workerNames.append(worker.workername)
489
490 force_factory = BuildFactory()
491
492 c['builders'].append(BuilderConfig(
493 name = "00_force_build",
494 workernames = workerNames,
495 factory = force_factory))
496
497 for target in targets:
498 ts = target.split('/')
499
500 factory = BuildFactory()
501
502 # setup shared work directory if required
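# (the step below replaces the per-builder workdir with a symlink to
# ../shared-workdir, so every target built on this worker can reuse the same
# checkout; it only runs when the workdir is not already such a symlink)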
503 factory.addStep(ShellCommand(
504 name = "sharedwd",
505 description = "Setting up shared work directory",
506 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
507 workdir = ".",
508 haltOnFailure = True))
509
510 # find number of cores
511 factory.addStep(SetPropertyFromCommand(
512 name = "nproc",
513 property = "nproc",
514 description = "Finding number of CPUs",
515 command = ["nproc"]))
516
517 # find gcc and g++ compilers
518 factory.addStep(FileDownload(
519 name = "dlfindbinpl",
520 mastersrc = scripts_dir + '/findbin.pl',
521 workerdest = "../findbin.pl",
522 mode = 0o755))
523
524 factory.addStep(SetPropertyFromCommand(
525 name = "gcc",
526 property = "cc_command",
527 description = "Finding gcc command",
528 command = [
529 "../findbin.pl", "gcc", "", "",
530 ],
531 haltOnFailure = True))
532
533 factory.addStep(SetPropertyFromCommand(
534 name = "g++",
535 property = "cxx_command",
536 description = "Finding g++ command",
537 command = [
538 "../findbin.pl", "g++", "", "",
539 ],
540 haltOnFailure = True))
541
542 # see if ccache is available
543 factory.addStep(SetPropertyFromCommand(
544 property = "ccache_command",
545 command = ["which", "ccache"],
546 description = "Testing for ccache command",
547 haltOnFailure = False,
548 flunkOnFailure = False,
549 warnOnFailure = False,
550 ))
551
552 # Workaround bug when switching from a checked out tag back to a branch
553 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
554 factory.addStep(ShellCommand(
555 name = "gitcheckout",
556 description = "Ensure that Git HEAD is sane",
557 command = Interpolate("if [ -d .git ]; then git checkout -f %(prop:branch)s && git branch --set-upstream-to origin/%(prop:branch)s || rm -fr .git; else exit 0; fi"),
558 haltOnFailure = True))
559
560 # check out the source
561 # Git() runs:
562 # if repo doesn't exist: 'git clone repourl'
563 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
564 # 'git fetch -t repourl branch; git reset --hard revision'
565 factory.addStep(Git(
566 name = "git",
567 repourl = repo_url,
568 branch = Interpolate("%(prop:branch)s"),
569 mode = 'full',
570 method = 'fresh',
571 locks = NetLockDl,
572 haltOnFailure = True,
573 ))
574
575 # update remote refs
576 factory.addStep(ShellCommand(
577 name = "fetchrefs",
578 description = "Fetching Git remote refs",
579 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
580 haltOnFailure = True
581 ))
582
583 # switch to tag
584 factory.addStep(ShellCommand(
585 name = "switchtag",
586 description = "Checking out Git tag",
587 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
588 haltOnFailure = True,
589 doStepIf = IsTaggingRequested
590 ))
591
592 # Verify that Git HEAD points to a tag or branch
593 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
594 factory.addStep(ShellCommand(
595 name = "gitverify",
596 description = "Ensure that Git HEAD is pointing to a branch or tag",
597 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
598 haltOnFailure = True))
599
600 factory.addStep(ShellCommand(
601 name = "rmtmp",
602 description = "Remove tmp folder",
603 command=["rm", "-rf", "tmp/"]))
604
605 # feed
606 factory.addStep(ShellCommand(
607 name = "rmfeedlinks",
608 description = "Remove feed symlinks",
609 command=["rm", "-rf", "package/feeds/"]))
610
611 factory.addStep(StringDownload(
612 name = "ccachecc",
613 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
614 workerdest = "../ccache_cc.sh",
615 mode = 0o755,
616 ))
617
618 factory.addStep(StringDownload(
619 name = "ccachecxx",
620 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
621 workerdest = "../ccache_cxx.sh",
622 mode = 0o755,
623 ))
624
625 # feed
626 factory.addStep(ShellCommand(
627 name = "updatefeeds",
628 description = "Updating feeds",
629 command=["./scripts/feeds", "update"],
630 env = MakeEnv(tryccache=True),
631 haltOnFailure = True,
632 locks = NetLockDl,
633 ))
634
635 # feed
636 factory.addStep(ShellCommand(
637 name = "installfeeds",
638 description = "Installing feeds",
639 command=["./scripts/feeds", "install", "-a"],
640 env = MakeEnv(tryccache=True),
641 haltOnFailure = True
642 ))
643
644 # seed config
645 if config_seed is not None:
646 factory.addStep(StringDownload(
647 name = "dlconfigseed",
648 s = config_seed + '\n',
649 workerdest = ".config",
650 mode = 0o644
651 ))
652
653 # configure
654 factory.addStep(ShellCommand(
655 name = "newconfig",
656 description = "Seeding .config",
657 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
658 ))
659
660 factory.addStep(ShellCommand(
661 name = "delbin",
662 description = "Removing output directory",
663 command = ["rm", "-rf", "bin/"]
664 ))
665
666 factory.addStep(ShellCommand(
667 name = "defconfig",
668 description = "Populating .config",
669 command = ["make", "defconfig"],
670 env = MakeEnv()
671 ))
672
673 # check arch
674 factory.addStep(ShellCommand(
675 name = "checkarch",
676 description = "Checking architecture",
677 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
678 logEnviron = False,
679 want_stdout = False,
680 want_stderr = False,
681 haltOnFailure = True
682 ))
683
684 # find libc suffix
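# (the suffix is empty for musl and "-glibc"/"-uclibc" style otherwise,
# matching the bin/targets/<target>/<subtarget><libc> directory naming below)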
685 factory.addStep(SetPropertyFromCommand(
686 name = "libc",
687 property = "libc",
688 description = "Finding libc suffix",
689 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
690
691 # install build key
692 if usign_key is not None:
693 factory.addStep(StringDownload(
694 name = "dlkeybuildpub",
695 s = UsignSec2Pub(usign_key, usign_comment),
696 workerdest = "key-build.pub",
697 mode = 0o600,
698 ))
699
700 factory.addStep(StringDownload(
701 name = "dlkeybuild",
702 s = "# fake private key",
703 workerdest = "key-build",
704 mode = 0o600,
705 ))
706
707 factory.addStep(StringDownload(
708 name = "dlkeybuilducert",
709 s = "# fake certificate",
710 workerdest = "key-build.ucert",
711 mode = 0o600,
712 ))
713
714 # prepare dl
715 factory.addStep(ShellCommand(
716 name = "dldir",
717 description = "Preparing dl/",
718 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
719 logEnviron = False,
720 want_stdout = False
721 ))
722
723 # prepare tar
724 factory.addStep(ShellCommand(
725 name = "dltar",
726 description = "Building and installing GNU tar",
727 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
728 env = MakeEnv(tryccache=True),
729 haltOnFailure = True
730 ))
731
732 # populate dl
733 factory.addStep(ShellCommand(
734 name = "dlrun",
735 description = "Populating dl/",
736 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
737 env = MakeEnv(),
738 logEnviron = False,
739 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
740 ))
741
742 factory.addStep(ShellCommand(
743 name = "cleanbase",
744 description = "Cleaning base-files",
745 command=["make", "package/base-files/clean", "V=s"]
746 ))
747
748 # build
749 factory.addStep(ShellCommand(
750 name = "tools",
751 description = "Building and installing tools",
752 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
753 env = MakeEnv(tryccache=True),
754 haltOnFailure = True
755 ))
756
757 factory.addStep(ShellCommand(
758 name = "toolchain",
759 description = "Building and installing toolchain",
760 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
761 env = MakeEnv(),
762 haltOnFailure = True
763 ))
764
765 factory.addStep(ShellCommand(
766 name = "kmods",
767 description = "Building kmods",
768 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
769 env = MakeEnv(),
770 haltOnFailure = True
771 ))
772
773 # find kernel version
774 factory.addStep(SetPropertyFromCommand(
775 name = "kernelversion",
776 property = "kernelversion",
777 description = "Finding the effective Kernel version",
778 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
779 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
780 ))
781
782 factory.addStep(ShellCommand(
783 name = "pkgclean",
784 description = "Cleaning up package build",
785 command=["make", "package/cleanup", "V=s"]
786 ))
787
788 factory.addStep(ShellCommand(
789 name = "pkgbuild",
790 description = "Building packages",
791 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
792 env = MakeEnv(),
793 haltOnFailure = True
794 ))
795
796 factory.addStep(ShellCommand(
797 name = "pkginstall",
798 description = "Installing packages",
799 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
800 env = MakeEnv(),
801 haltOnFailure = True
802 ))
803
804 factory.addStep(ShellCommand(
805 name = "pkgindex",
806 description = "Indexing packages",
807 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
808 env = MakeEnv(),
809 haltOnFailure = True
810 ))
811
812 factory.addStep(ShellCommand(
813 name = "images",
814 description = "Building and installing images",
815 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
816 env = MakeEnv(),
817 haltOnFailure = True
818 ))
819
820 factory.addStep(ShellCommand(
821 name = "buildinfo",
822 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
823 command = "make -j1 buildinfo V=s || true",
824 env = MakeEnv(),
825 haltOnFailure = True
826 ))
827
828 factory.addStep(ShellCommand(
829 name = "json_overview_image_info",
830 description = "Generate profiles.json in target folder",
831 command = "make -j1 json_overview_image_info V=s || true",
832 env = MakeEnv(),
833 haltOnFailure = True
834 ))
835
836 factory.addStep(ShellCommand(
837 name = "checksums",
838 description = "Calculating checksums",
839 command=["make", "-j1", "checksum", "V=s"],
840 env = MakeEnv(),
841 haltOnFailure = True
842 ))
843
844 if enable_kmod_archive:
845 factory.addStep(ShellCommand(
846 name = "kmoddir",
847 description = "Creating kmod directory",
848 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
849 haltOnFailure = True
850 ))
851
852 factory.addStep(ShellCommand(
853 name = "kmodprepare",
854 description = "Preparing kmod archive",
855 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
856 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
857 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
858 haltOnFailure = True
859 ))
860
861 factory.addStep(ShellCommand(
862 name = "kmodindex",
863 description = "Indexing kmod archive",
864 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
865 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
866 env = MakeEnv(),
867 haltOnFailure = True
868 ))
869
870 # sign
871 if ini.has_option("gpg", "key") or usign_key is not None:
872 factory.addStep(MasterShellCommand(
873 name = "signprepare",
874 description = "Preparing temporary signing directory",
875 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
876 haltOnFailure = True
877 ))
878
879 factory.addStep(ShellCommand(
880 name = "signpack",
881 description = "Packing files to sign",
882 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
883 haltOnFailure = True
884 ))
885
886 factory.addStep(FileUpload(
887 workersrc = "sign.tar.gz",
888 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
889 haltOnFailure = True
890 ))
891
892 factory.addStep(MasterShellCommand(
893 name = "signfiles",
894 description = "Signing files",
895 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
896 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
897 haltOnFailure = True
898 ))
899
900 factory.addStep(FileDownload(
901 name = "dlsigntargz",
902 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
903 workerdest = "sign.tar.gz",
904 haltOnFailure = True
905 ))
906
907 factory.addStep(ShellCommand(
908 name = "signunpack",
909 description = "Unpacking signed files",
910 command = ["tar", "-xzf", "sign.tar.gz"],
911 haltOnFailure = True
912 ))
913
914 # upload
915 factory.addStep(ShellCommand(
916 name = "dirprepare",
917 description = "Preparing upload directory structure",
918 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
919 haltOnFailure = True
920 ))
921
922 factory.addStep(ShellCommand(
923 name = "linkprepare",
924 description = "Preparing repository symlink",
925 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
926 doStepIf = IsNoMasterBuild,
927 haltOnFailure = True
928 ))
929
930 if enable_kmod_archive:
931 factory.addStep(ShellCommand(
932 name = "kmoddirprepare",
933 description = "Preparing kmod archive upload directory",
934 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
935 haltOnFailure = True
936 ))
937
938 factory.addStep(ShellCommand(
939 name = "dirupload",
940 description = "Uploading directory structure",
941 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
942 env={'RSYNC_PASSWORD': rsync_bin_key},
943 haltOnFailure = True,
944 logEnviron = False,
945 locks = NetLockUl,
946 ))
947
948 # download remote sha256sums to 'target-sha256sums'
949 factory.addStep(ShellCommand(
950 name = "target-sha256sums",
951 description = "Fetching remote sha256sums for target",
952 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
953 env={'RSYNC_PASSWORD': rsync_bin_key},
954 logEnviron = False,
955 haltOnFailure = False,
956 flunkOnFailure = False,
957 warnOnFailure = False,
958 ))
959
960 # build list of files to upload
961 factory.addStep(FileDownload(
962 name = "dlsha2rsyncpl",
963 mastersrc = scripts_dir + '/sha2rsync.pl',
964 workerdest = "../sha2rsync.pl",
965 mode = 0o755,
966 ))
967
968 factory.addStep(ShellCommand(
969 name = "buildlist",
970 description = "Building list of files to upload",
971 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
972 haltOnFailure = True,
973 ))
974
975 factory.addStep(FileDownload(
976 name = "dlrsync.sh",
977 mastersrc = scripts_dir + '/rsync.sh',
978 workerdest = "../rsync.sh",
979 mode = 0o755
980 ))
981
982 # upload new files and update existing ones
983 factory.addStep(ShellCommand(
984 name = "targetupload",
985 description = "Uploading target files",
986 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
987 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
988 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
989 env={'RSYNC_PASSWORD': rsync_bin_key},
990 haltOnFailure = True,
991 logEnviron = False,
992 ))
993
994 # delete files which don't exist locally
995 factory.addStep(ShellCommand(
996 name = "targetprune",
997 description = "Pruning target files",
998 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
999 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1000 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1001 env={'RSYNC_PASSWORD': rsync_bin_key},
1002 haltOnFailure = True,
1003 logEnviron = False,
1004 locks = NetLockUl,
1005 ))
1006
1007 if enable_kmod_archive:
1008 factory.addStep(ShellCommand(
1009 name = "kmodupload",
1010 description = "Uploading kmod archive",
1011 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1012 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1013 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1014 env={'RSYNC_PASSWORD': rsync_bin_key},
1015 haltOnFailure = True,
1016 logEnviron = False,
1017 locks = NetLockUl,
1018 ))
1019
1020 if rsync_src_url is not None:
1021 factory.addStep(ShellCommand(
1022 name = "sourcelist",
1023 description = "Finding source archives to upload",
1024 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1025 haltOnFailure = True
1026 ))
1027
1028 factory.addStep(ShellCommand(
1029 name = "sourceupload",
1030 description = "Uploading source archives",
1031 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1032 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1033 env={'RSYNC_PASSWORD': rsync_src_key},
1034 haltOnFailure = True,
1035 logEnviron = False,
1036 locks = NetLockUl,
1037 ))
1038
1039 factory.addStep(ShellCommand(
1040 name = "df",
1041 description = "Reporting disk usage",
1042 command=["df", "-h", "."],
1043 env={'LC_ALL': 'C'},
1044 haltOnFailure = False,
1045 flunkOnFailure = False,
1046 warnOnFailure = False,
1047 alwaysRun = True
1048 ))
1049
1050 factory.addStep(ShellCommand(
1051 name = "du",
1052 description = "Reporting estimated file space usage",
1053 command=["du", "-sh", "."],
1054 env={'LC_ALL': 'C'},
1055 haltOnFailure = False,
1056 flunkOnFailure = False,
1057 warnOnFailure = False,
1058 alwaysRun = True
1059 ))
1060
1061 factory.addStep(ShellCommand(
1062 name = "ccachestat",
1063 description = "Reporting ccache stats",
1064 command=["ccache", "-s"],
1065 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1066 want_stderr = False,
1067 haltOnFailure = False,
1068 flunkOnFailure = False,
1069 warnOnFailure = False,
1070 alwaysRun = True,
1071 ))
1072
1073 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1074
1075 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1076 force_factory.addStep(steps.Trigger(
1077 name = "trigger_%s" % target,
1078 description = "Triggering %s build" % target,
1079 schedulerNames = [ "trigger_%s" % target ],
1080 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1081 doStepIf = IsTargetSelected(target)
1082 ))
1083
1084
1085 ####### STATUS TARGETS
1086
1087 # Status targets control how build results are reported. This configuration
1088 # sets up the web UI ('www') and, when configured, reporter services such
1089 # as an IRC bot ('services').
1090
1091 if "status_bind" in inip1:
1092 c['www'] = {
1093 'port': inip1.get("status_bind"),
1094 'plugins': {
1095 'waterfall_view': True,
1096 'console_view': True,
1097 'grid_view': True
1098 }
1099 }
1100
1101 if "status_user" in inip1 and "status_password" in inip1:
1102 c['www']['auth'] = util.UserPasswordAuth([
1103 (inip1.get("status_user"), inip1.get("status_password"))
1104 ])
1105 c['www']['authz'] = util.Authz(
1106 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1107 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1108 )
1109
1110 c['services'] = []
1111 if ini.has_section("irc"):
1112 iniirc = ini['irc']
1113 irc_host = iniirc.get("host", None)
1114 irc_port = iniirc.getint("port", 6667)
1115 irc_chan = iniirc.get("channel", None)
1116 irc_nick = iniirc.get("nickname", None)
1117 irc_pass = iniirc.get("password", None)
1118
1119 if irc_host and irc_nick and irc_chan:
1120 irc = reporters.IRC(irc_host, irc_nick,
1121 port = irc_port,
1122 password = irc_pass,
1123 channels = [ irc_chan ],
1124 notify_events = [ 'exception', 'problem', 'recovery' ]
1125 )
1126
1127 c['services'].append(irc)
1128
1129 c['revlink'] = util.RevlinkMatch([
1130 r'https://git.openwrt.org/openwrt/(.*).git'
1131 ],
1132 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1133
1134 ####### DB URL
1135
1136 c['db'] = {
1137 # This specifies what database buildbot uses to store its state. You can leave
1138 # this at its default for all but the largest installations.
1139 'db_url' : "sqlite:///state.sqlite",
1140 }
1141
1142 c['buildbotNetUsageData'] = None