a5b7a6b2fdb4b5336ef729913f9f498b829a590b
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record this master's PID so external tooling can find the running process.
# Only write the file if a previous run has not left one behind.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
# Master configuration is read from an ini file; the BUILDMASTER_CONFIG
# environment variable may point at an alternate path (default ./config.ini).
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
57
58 ####### PROJECT IDENTITY
59
60 # the 'title' string will appear at the top of this buildbot
61 # installation's html.WebStatus home page (linked to the
62 # 'titleURL') and is embedded in the title of the waterfall HTML page.
63
c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = ini.get("phase1", "buildbot_url")

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9989

# NOTE(review): ini.get() returns a string here while the default is an int;
# buildbot's PB port accepts either form, but the mixed types look accidental.
if ini.has_option("phase1", "port"):
    worker_port = ini.get("phase1", "port")
85
c['workers'] = []
# Named master-level network locks, shared between workers that declare the
# same dl_lock/ul_lock name in the ini file.
NetLocks = dict()

# Register every "worker <x>" ini section that belongs to phase 1 (no phase
# option, or phase = 1) and has both a name and a password.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
            sl_props['max_builds'] = max_builds
            # A single-slot worker can safely share one work directory.
            if max_builds == 1:
                sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUG FIX: this branch previously read the "dl_lock" option, so
                # a worker configured with only ul_lock stored the wrong lock
                # name (and NetLockUl could KeyError or lock the wrong resource).
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds: pending requests for the same builder are merged into one
c['collapseRequests'] = True

# Reduce amount of backlog data: the janitor deletes step logs older than
# three days, running daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
134
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also look at the most recently *started* build: it may have finished
    # later than the newest request's complete_at, in which case its
    # timestamp is the more accurate "last activity" marker.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
172
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # True if the builder currently has an active (or stale) build.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its last-completion timestamp, via Deferred.
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders sort first (oldest possible aware timestamp)...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ...and currently-building ones last, so idle builders get priority.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # NOTE: this local name shadows the imported buildbot `results` module,
    # but only within this function's scope.
    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    for r in results:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
211
212 ####### CHANGESOURCES
213
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")
# Build-tree expiry threshold passed to expire.sh; 0 disables the expire step.
tree_expire = 0

cc_command = "gcc"
cxx_command = "g++"

# Extra .config fragment prepended to every build's seed configuration.
config_seed = ""

if ini.has_option("phase1", "expire"):
    tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "config_seed"):
    config_seed = ini.get("phase1", "config_seed")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# Native rsync transports ("host::module" or rsync://) get a connect timeout;
# --contimeout is not valid for remote-shell transports.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = None
rsync_src_key = None
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

    if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
        rsync_src_defopts += ["--contimeout=20"]

# usign signing key (base64 secret key) and the comment embedded in the
# derived public key; default comment is derived from the branch name.
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

enable_kmod_archive = False

if ini.has_option("phase1", "kmod_archive"):
    enable_kmod_archive = ini.getboolean("phase1", "kmod_archive")
266
267
# find targets
targets = [ ]

# Keep a shallow local clone of the source tree up to date; the target list
# is produced by the tree's own dump-target-info.pl script.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Each output line starts with "<target/subtarget>"; keep only that field.
while True:
    line = findtargets.stdout.readline()
    if not line:
        break
    ta = line.decode().strip().split(' ')
    targets.append(ta[0])
286
287
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured source repository.

c['change_source'] = []
# Poll repo_branch of the source repository every 5 minutes for new commits.
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
296
297 ####### SCHEDULERS
298
299 # Configure the Schedulers, which decide how to react to incoming changes. In this
300 # case, just kick off a 'basebuild' build
301
class TagChoiceParameter(BaseParameter):
    """Force-scheduler dropdown offering the release tags that match the
    configured branch's base version (e.g. v21.02.* for openwrt-21.02)."""

    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        """Re-scan the remote repository's tags and return the choice list."""
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            # One "<sha>\trefs/tags/<name>" record per line until EOF.
            for line in findtags.stdout:
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

        # Newest first; the '-z' suffix makes final releases sort before their
        # own -rc tags. The leading empty entry means "no tag" (snapshot).
        taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        taglist.insert(0, '')

        self._choice_list = taglist
        return self._choice_list

    def parse_from_arg(self, s):
        # Reject any value that was not in the most recently computed list.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
343
c['schedulers'] = []
# Automatic builds: any change on repo_branch (after 60s of tree quiet)
# queues every per-target builder.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual builds: the force scheduler targets the virtual 00_force_build
# builder and exposes target and release-tag selection as nested properties.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Repository coordinates are fixed: forced builds always use the
    # configured repo/branch.
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
397
398 ####### BUILDERS
399
400 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
401 # what steps, and which workers can execute them. Note that any particular build will
402 # only take place on one worker.
403
# Maps the user-facing "clean" force-property prefixes to the make targets
# they trigger; matched via IsMakeCleanRequested below.
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]
411
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches `pattern` (one of the CleanTargetMap prefixes)."""
    def CheckCleanProperty(step):
        val = step.getProperty("clean")
        # bool() collapses the match-object/None result to True/False.
        return bool(val and re.match(pattern, val))

    return CheckCleanProperty
421
def IsSharedWorkdir(step):
    """doStepIf: the worker was configured with a shared work directory."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
424
def IsCleanupRequested(step):
    """doStepIf: run the cleanup.sh steps.

    Only applies to non-shared work directories whose worker was configured
    with cleanup=true (the "do_cleanup" worker property)."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))
433
def IsExpireRequested(step):
    """doStepIf: run expire.sh — only when neither the shared-workdir nor the
    cleanup path applies to this worker."""
    return not (IsSharedWorkdir(step) or IsCleanupRequested(step))
439
def IsGitFreshRequested(step):
    """doStepIf: use the Git "fresh" checkout method (also removes ignored
    files) when the worker was configured with cleanup=true."""
    return bool(step.getProperty("do_cleanup"))
446
def IsGitCleanRequested(step):
    """doStepIf: use the Git "clean" checkout method — the complement of the
    "fresh" method, i.e. whenever no cleanup was requested."""
    return not step.getProperty("do_cleanup")
449
def IsTaggingRequested(step):
    """doStepIf: a well-formed release tag (e.g. "19.07.3" or "21.02.0-rc1")
    was supplied through the force scheduler's "tag" property."""
    val = step.getProperty("tag")
    return bool(val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val))
456
def IsNoTaggingRequested(step):
    """doStepIf complement of IsTaggingRequested (snapshot builds)."""
    return not IsTaggingRequested(step)
459
def IsNoMasterBuild(step):
    """doStepIf: true on release branches only, never on master."""
    return repo_branch != "master"
462
def GetBaseVersion(branch=None):
    """Return the "NN.NN" base version of a release branch name such as
    "openwrt-21.02", or "master" for anything else.

    `branch` defaults to the globally configured repo_branch, keeping the
    original zero-argument call sites working unchanged."""
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    return "master"
468
@properties.renderer
def GetVersionPrefix(props):
    """Render the bin/ upload path prefix: "<tag>/" for tagged release
    builds, "<NN.NN>-SNAPSHOT/" on a release branch, "" on master."""
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
478
@properties.renderer
def GetNumJobs(props):
    """Render the make -j value: worker CPU count split across build slots."""
    if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
        return "1"
    return str(int(int(props["nproc"]) / props["max_builds"]))
485
@properties.renderer
def GetCC(props):
    """Render the detected host C compiler, falling back to plain gcc."""
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"
492
@properties.renderer
def GetCXX(props):
    """Render the detected host C++ compiler, falling back to plain g++."""
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"
499
@properties.renderer
def GetCwd(props):
    """Render the build's working directory: builddir, then workdir, then /."""
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
508
@properties.renderer
def GetCCache(props):
    """Render the ccache path found on the worker, or "" when unavailable."""
    ccache = props["ccache_command"] if props.hasProperty("ccache_command") else ""
    return ccache if "ccache" in ccache else ""
515
def GetNextBuild(builder, requests):
    """nextBuild hook: prefer any request carrying a "tag" property (forced
    release builds), otherwise take the first queued request."""
    for req in requests:
        if req.properties and req.properties.hasProperty("tag"):
            return req

    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
524
def MakeEnv(overrides=None, tryccache=False):
    """Build the common make/shell environment for build steps.

    CCC/CCXX always carry the detected host compilers. With tryccache=True,
    CC/CXX point at the ccache wrapper scripts and CCACHE at the ccache
    binary; otherwise CC/CXX are the compilers themselves and CCACHE is
    empty. Entries from `overrides` win over everything else."""
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
        env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
        env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
    else:
        env['CC'] = env['CCC']
        env['CXX'] = env['CCXX']
        env['CCACHE'] = ''
    if overrides:
        env.update(overrides)
    return env
541
@properties.renderer
def NetLockDl(props):
    """Render the step's lock list: the worker's download MasterLock taken
    exclusively, or no locks when none was configured."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []
551
@properties.renderer
def NetLockUl(props):
    """Render the step's lock list: the worker's upload MasterLock taken
    exclusively, or no locks when none was configured."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
561
@util.renderer
def TagPropertyValue(props):
    """Render the forced release tag nested under the "options" property,
    or None when no (valid) options dict is present."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        # isinstance instead of `type(...) is dict`: accepts dict subclasses.
        if isinstance(options, dict):
            return options.get("tag")
    return None
569
def IsTargetSelected(target):
    """Return a doStepIf predicate that is true when the forced build's
    selected target is "all" or equals `target`.

    Missing or malformed "options" selects every target (returns True)."""
    def CheckTargetProperty(step):
        try:
            options = step.getProperty("options")
            # isinstance instead of `type(...) is dict`: accepts subclasses.
            if isinstance(options, dict):
                selected_target = options.get("target", "all")
                if selected_target != "all" and selected_target != target:
                    return False
        except KeyError:
            pass

        return True

    return CheckTargetProperty
584
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64-encoded secret key.

    The public key material is bytes 0-1 (pkalg), 32-39 (keynum) and 72+
    (public key) of the decoded secret-key blob. Returns None when seckey
    is not valid base64.

    BUG FIX: base64.b64encode() returns bytes; the old code formatted that
    directly, producing a corrupt "b'...'" line in key-build.pub. The result
    is now decoded to str. The bare `except:` was also narrowed."""
    try:
        blob = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # Covers binascii.Error (a ValueError subclass) and non-str input.
        return None

    pub = base64.b64encode(blob[0:2] + blob[32:40] + blob[72:]).decode("ascii")
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pub)
593
594
c['builders'] = []

# Per-worker lock taken exclusively by the dl/ population step, since all
# builds on a worker share one $HOME/dl directory.
dlLock = locks.WorkerLock("worker_dl")
598
# Shell helper injected into IfBuiltinShellCommand invocations. Returns 0
# (i.e. "run the wrapped command") unless every changed file belongs to a
# package that is not enabled as built-in (=y) in the current .config,
# judged via the symbol/path mapping in tmp/.packagedeps.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
	checkBuiltin() {
		local symbol op path file;
		for file in $CHANGED_FILES; do
			case "$file" in
				package/*/*) : ;;
				*) return 0 ;;
			esac;
		done;
		while read symbol op path; do
			case "$symbol" in package-*)
				symbol="${symbol##*(}";
				symbol="${symbol%)}";
				for file in $CHANGED_FILES; do
					case "$file" in "package/$path/"*)
						grep -qsx "$symbol=y" .config && return 0
					;; esac;
				done;
			esac;
		done < tmp/.packagedeps;
		return 1;
	}
""").strip()
622
623
class IfBuiltinShellCommand(ShellCommand):
    """ShellCommand wrapped in the checkBuiltin shell helper: the real
    command only runs when the build's changed files may affect packages
    built into the current .config; otherwise the step exits 0.

    The worker environment is extended with CHANGED_FILES, the space-joined
    list of files touched by this build's change sources."""

    def _quote(self, arg):
        # Shell-quote a single argument; plain tokens pass through untouched.
        # (Parameter renamed from `str`, which shadowed the builtin.)
        if re.search("[^a-zA-Z0-9/_.-]", arg):
            return "'%s'" %(re.sub("'", "'\"'\"'", arg))
        return arg

    def setCommand(self, command):
        # BUG FIX: the old check was isinstance(command, (str, unicode));
        # `unicode` does not exist on Python 3 and raised NameError for
        # list-form commands.
        if not isinstance(command, str):
            command = ' '.join(map(self._quote, command))
        self.command = [
            '/bin/sh', '-c',
            '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
        ]

    def setupEnvironment(self, cmd):
        workerEnv = self.workerEnvironment
        if workerEnv is None:
            workerEnv = { }
        # Collect the unique set of files changed across all merged requests.
        changedFiles = { }
        for request in self.build.requests:
            for source in request.sources:
                for change in source.changes:
                    for file in change.files:
                        changedFiles[file] = True
        fullEnv = workerEnv.copy()
        fullEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
        cmd.args['env'] = fullEnv
650 cmd.args['env'] = fullSlaveEnv
651
# Every configured phase1 worker may run any builder.
workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

# The force builder has no steps of its own; it exists only as the target of
# the force scheduler, which fans out to the real per-target builders.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
663
664 for target in targets:
665 ts = target.split('/')
666
667 factory = BuildFactory()
668
669 # setup shared work directory if required
670 factory.addStep(ShellCommand(
671 name = "sharedwd",
672 description = "Setting up shared work directory",
673 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
674 workdir = ".",
675 haltOnFailure = True,
676 doStepIf = IsSharedWorkdir))
677
678 # find number of cores
679 factory.addStep(SetPropertyFromCommand(
680 name = "nproc",
681 property = "nproc",
682 description = "Finding number of CPUs",
683 command = ["nproc"]))
684
685 # find gcc and g++ compilers
686 factory.addStep(FileDownload(
687 name = "dlfindbinpl",
688 mastersrc = scripts_dir + '/findbin.pl',
689 workerdest = "../findbin.pl",
690 mode = 0o755))
691
692 factory.addStep(SetPropertyFromCommand(
693 name = "gcc",
694 property = "cc_command",
695 description = "Finding gcc command",
696 command = [
697 "../findbin.pl", "gcc", "", "",
698 ],
699 haltOnFailure = True))
700
701 factory.addStep(SetPropertyFromCommand(
702 name = "g++",
703 property = "cxx_command",
704 description = "Finding g++ command",
705 command = [
706 "../findbin.pl", "g++", "", "",
707 ],
708 haltOnFailure = True))
709
710 # see if ccache is available
711 factory.addStep(SetPropertyFromCommand(
712 property = "ccache_command",
713 command = ["which", "ccache"],
714 description = "Testing for ccache command",
715 haltOnFailure = False,
716 flunkOnFailure = False,
717 warnOnFailure = False,
718 ))
719
720 # expire tree if needed
721 if tree_expire > 0:
722 factory.addStep(FileDownload(
723 name = "dlexpiresh",
724 doStepIf = IsExpireRequested,
725 mastersrc = scripts_dir + '/expire.sh',
726 workerdest = "../expire.sh",
727 mode = 0o755))
728
729 factory.addStep(ShellCommand(
730 name = "expire",
731 description = "Checking for build tree expiry",
732 command = ["./expire.sh", str(tree_expire)],
733 workdir = ".",
734 haltOnFailure = True,
735 doStepIf = IsExpireRequested,
736 timeout = 2400))
737
738 # cleanup.sh if needed
739 factory.addStep(FileDownload(
740 name = "dlcleanupsh",
741 mastersrc = scripts_dir + '/cleanup.sh',
742 workerdest = "../cleanup.sh",
743 mode = 0o755,
744 doStepIf = IsCleanupRequested))
745
746 factory.addStep(ShellCommand(
747 name = "cleanold",
748 description = "Cleaning previous builds",
749 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
750 workdir = ".",
751 haltOnFailure = True,
752 doStepIf = IsCleanupRequested,
753 timeout = 2400))
754
755 factory.addStep(ShellCommand(
756 name = "cleanup",
757 description = "Cleaning work area",
758 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
759 workdir = ".",
760 haltOnFailure = True,
761 doStepIf = IsCleanupRequested,
762 timeout = 2400))
763
764 # user-requested clean targets
765 for tuple in CleanTargetMap:
766 factory.addStep(ShellCommand(
767 name = tuple[1],
768 description = 'User-requested "make %s"' % tuple[1],
769 command = ["make", tuple[1], "V=s"],
770 env = MakeEnv(),
771 doStepIf = IsMakeCleanRequested(tuple[0])
772 ))
773
774 # Workaround bug when switching from a checked out tag back to a branch
775 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
776 factory.addStep(ShellCommand(
777 name = "gitcheckout",
778 description = "Ensure that Git HEAD is sane",
779 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
780 haltOnFailure = True))
781
782 # check out the source
783 # Git() runs:
784 # if repo doesn't exist: 'git clone repourl'
785 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
786 # 'git fetch -t repourl branch; git reset --hard revision'
787 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
788 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
789 factory.addStep(Git(
790 name = "gitclean",
791 repourl = repo_url,
792 branch = repo_branch,
793 mode = 'full',
794 method = 'clean',
795 locks = NetLockDl,
796 haltOnFailure = True,
797 doStepIf = IsGitCleanRequested,
798 ))
799
800 factory.addStep(Git(
801 name = "gitfresh",
802 repourl = repo_url,
803 branch = repo_branch,
804 mode = 'full',
805 method = 'fresh',
806 locks = NetLockDl,
807 haltOnFailure = True,
808 doStepIf = IsGitFreshRequested,
809 ))
810
811 # update remote refs
812 factory.addStep(ShellCommand(
813 name = "fetchrefs",
814 description = "Fetching Git remote refs",
815 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
816 haltOnFailure = True
817 ))
818
819 # switch to tag
820 factory.addStep(ShellCommand(
821 name = "switchtag",
822 description = "Checking out Git tag",
823 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
824 haltOnFailure = True,
825 doStepIf = IsTaggingRequested
826 ))
827
828 # Verify that Git HEAD points to a tag or branch
829 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
830 factory.addStep(ShellCommand(
831 name = "gitverify",
832 description = "Ensure that Git HEAD is pointing to a branch or tag",
833 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
834 haltOnFailure = True))
835
836 factory.addStep(ShellCommand(
837 name = "rmtmp",
838 description = "Remove tmp folder",
839 command=["rm", "-rf", "tmp/"]))
840
841 # feed
842 # factory.addStep(ShellCommand(
843 # name = "feedsconf",
844 # description = "Copy the feeds.conf",
845 # command='''cp ~/feeds.conf ./feeds.conf''' ))
846
847 # feed
848 factory.addStep(ShellCommand(
849 name = "rmfeedlinks",
850 description = "Remove feed symlinks",
851 command=["rm", "-rf", "package/feeds/"]))
852
853 factory.addStep(StringDownload(
854 name = "ccachecc",
855 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
856 workerdest = "../ccache_cc.sh",
857 mode = 0o755,
858 ))
859
860 factory.addStep(StringDownload(
861 name = "ccachecxx",
862 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
863 workerdest = "../ccache_cxx.sh",
864 mode = 0o755,
865 ))
866
867 # feed
868 factory.addStep(ShellCommand(
869 name = "updatefeeds",
870 description = "Updating feeds",
871 command=["./scripts/feeds", "update"],
872 env = MakeEnv(tryccache=True),
873 haltOnFailure = True,
874 locks = NetLockDl,
875 ))
876
877 # feed
878 factory.addStep(ShellCommand(
879 name = "installfeeds",
880 description = "Installing feeds",
881 command=["./scripts/feeds", "install", "-a"],
882 env = MakeEnv(tryccache=True),
883 haltOnFailure = True
884 ))
885
886 # seed config
887 if config_seed is not None:
888 factory.addStep(StringDownload(
889 name = "dlconfigseed",
890 s = config_seed + '\n',
891 workerdest = ".config",
892 mode = 0o644
893 ))
894
895 # configure
896 factory.addStep(ShellCommand(
897 name = "newconfig",
898 description = "Seeding .config",
899 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
900 ))
901
902 factory.addStep(ShellCommand(
903 name = "delbin",
904 description = "Removing output directory",
905 command = ["rm", "-rf", "bin/"]
906 ))
907
908 factory.addStep(ShellCommand(
909 name = "defconfig",
910 description = "Populating .config",
911 command = ["make", "defconfig"],
912 env = MakeEnv()
913 ))
914
915 # check arch
916 factory.addStep(ShellCommand(
917 name = "checkarch",
918 description = "Checking architecture",
919 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
920 logEnviron = False,
921 want_stdout = False,
922 want_stderr = False,
923 haltOnFailure = True
924 ))
925
926 # find libc suffix
927 factory.addStep(SetPropertyFromCommand(
928 name = "libc",
929 property = "libc",
930 description = "Finding libc suffix",
931 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
932
933 # install build key
934 if usign_key is not None:
935 factory.addStep(StringDownload(
936 name = "dlkeybuildpub",
937 s = UsignSec2Pub(usign_key, usign_comment),
938 workerdest = "key-build.pub",
939 mode = 0o600,
940 ))
941
942 factory.addStep(StringDownload(
943 name = "dlkeybuild",
944 s = "# fake private key",
945 workerdest = "key-build",
946 mode = 0o600,
947 ))
948
949 factory.addStep(StringDownload(
950 name = "dlkeybuilducert",
951 s = "# fake certificate",
952 workerdest = "key-build.ucert",
953 mode = 0o600,
954 ))
955
956 # prepare dl
957 factory.addStep(ShellCommand(
958 name = "dldir",
959 description = "Preparing dl/",
960 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
961 logEnviron = False,
962 want_stdout = False
963 ))
964
965 # prepare tar
966 factory.addStep(ShellCommand(
967 name = "dltar",
968 description = "Building and installing GNU tar",
969 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
970 env = MakeEnv(tryccache=True),
971 haltOnFailure = True
972 ))
973
974 # populate dl
975 factory.addStep(ShellCommand(
976 name = "dlrun",
977 description = "Populating dl/",
978 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
979 env = MakeEnv(),
980 logEnviron = False,
981 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
982 ))
983
984 factory.addStep(ShellCommand(
985 name = "cleanbase",
986 description = "Cleaning base-files",
987 command=["make", "package/base-files/clean", "V=s"]
988 ))
989
990 # build
991 factory.addStep(ShellCommand(
992 name = "tools",
993 description = "Building and installing tools",
994 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
995 env = MakeEnv(tryccache=True),
996 haltOnFailure = True
997 ))
998
999 factory.addStep(ShellCommand(
1000 name = "toolchain",
1001 description = "Building and installing toolchain",
1002 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
1003 env = MakeEnv(),
1004 haltOnFailure = True
1005 ))
1006
1007 factory.addStep(ShellCommand(
1008 name = "kmods",
1009 description = "Building kmods",
1010 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
1011 env = MakeEnv(),
1012 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
1013 haltOnFailure = True
1014 ))
1015
1016 # find kernel version
1017 factory.addStep(SetPropertyFromCommand(
1018 name = "kernelversion",
1019 property = "kernelversion",
1020 description = "Finding the effective Kernel version",
1021 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1022 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
1023 ))
1024
1025 factory.addStep(ShellCommand(
1026 name = "pkgclean",
1027 description = "Cleaning up package build",
1028 command=["make", "package/cleanup", "V=s"]
1029 ))
1030
	# compile all selected packages; failures of =n/=m packages are tolerated
	factory.addStep(ShellCommand(
		name = "pkgbuild",
		description = "Building packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		env = MakeEnv(),
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
		haltOnFailure = True
	))

	# factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		name = "pkginstall",
		description = "Installing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	# generate the package index; on-worker signing is disabled
	# (CONFIG_SIGNED_PACKAGES=) because index files are signed on the
	# master in the "sign" section further below
	factory.addStep(ShellCommand(
		name = "pkgindex",
		description = "Indexing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
		env = MakeEnv(),
		haltOnFailure = True
	))
1056
	#factory.addStep(IfBuiltinShellCommand(
	# build the firmware images
	factory.addStep(ShellCommand(
		name = "images",
		description = "Building and installing images",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))

	# "|| true" keeps the step green even when the make target is unavailable
	factory.addStep(ShellCommand(
		name = "buildinfo",
		description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
		command = "make -j1 buildinfo V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	# best effort as well ("|| true"): profiles.json in the target folder
	factory.addStep(ShellCommand(
		name = "json_overview_image_info",
		description = "Generate profiles.json in target folder",
		command = "make -j1 json_overview_image_info V=s || true",
		env = MakeEnv(),
		haltOnFailure = True
	))

	# create the sha256sums file for the produced artifacts; it is the
	# basis for the later signing and incremental-upload steps
	factory.addStep(ShellCommand(
		name = "checksums",
		description = "Calculating checksums",
		command=["make", "-j1", "checksum", "V=s"],
		env = MakeEnv(),
		haltOnFailure = True
	))
1089
	# optionally build a per-kernel-version kmod archive under
	# bin/targets/<target>/<subtarget><libc>/kmods/<kernelversion>/
	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddir",
			description = "Creating kmod directory",
			command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		# copy only kmod-*.ipk packages into the kmod archive directory
		factory.addStep(ShellCommand(
			name = "kmodprepare",
			description = "Preparing kmod archive",
			command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
			         Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
			         Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			haltOnFailure = True
		))

		# build a package index inside the kmod archive; unsigned here,
		# signing happens on the master later
		factory.addStep(ShellCommand(
			name = "kmodindex",
			description = "Indexing kmod archive",
			command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
			         Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
			env = MakeEnv(),
			haltOnFailure = True
		))
1115
	# sign
	# sha256sums and Packages files are signed on the master, since the
	# signing keys never leave it: pack the files on the worker, upload
	# them, sign via signall.sh, then download and unpack again
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		# collect all sha256sums and Packages files into one tarball
		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			workersrc = "sign.tar.gz",
			masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			haltOnFailure = True
		))

		# signall.sh runs on the master; it gets the buildmaster config
		# path via CONFIG_INI to locate the signing keys
		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			name = "dlsigntargz",
			mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			workerdest = "sign.tar.gz",
			haltOnFailure = True
		))

		# unpacking overwrites the originals with their signed versions
		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
1159
	# upload
	# first mirror the remote directory layout under tmp/upload/
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	# non-master builds get a "packages" symlink pointing at the shared
	# packages-<basever> feed directory
	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	# push the directory skeleton to the download server;
	# logEnviron=False keeps RSYNC_PASSWORD out of the logs, and
	# NetLockUl is the upload lock shared by all rsync upload steps
	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))
1193
	# download remote sha256sums to 'target-sha256sums'
	# failure is tolerated here: the remote file may not exist yet
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	# sha2rsync.pl compares the remote and local sha256sums and writes
	# the names of changed files to 'rsynclist'
	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	# helper script wrapping rsync, used by all upload steps below
	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))
1227
	# upload new files and update existing ones; the per-target
	# --partial-dir lets interrupted transfers resume
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally
	# (--existing --ignore-existing skips creating and updating files,
	# so combined with --delete this pass performs deletions only)
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))

	# the kmod archive is excluded above and synced in full here
	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
			         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1265
	# upload source tarballs from dl/ to the source mirror, if configured
	if rsync_src_url is not None:
		# only regular, non-empty files newer than .config are listed,
		# skipping hidden files, *.hash files and partial *.dl downloads
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# --size-only: tarballs are immutable, size match is good enough;
		# the partial dir includes the worker name to avoid collisions
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
			        [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))

	# currently disabled: package feed upload
	if False:
		factory.addStep(ShellCommand(
			name = "packageupload",
			description = "Uploading package files",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			flunkOnFailure = False,
			warnOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))

	# logs
	# currently disabled: build log upload
	if False:
		factory.addStep(ShellCommand(
			name = "upload",
			description = "Uploading logs",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = False,
			flunkOnFailure = False,
			warnOnFailure = True,
			alwaysRun = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1312
	# diagnostic steps: always run, never affect the build result
	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "du",
		description = "Reporting estimated file space usage",
		command=["du", "-sh", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	# PATH is extended with staging_dir/host/bin so a ccache built by the
	# buildroot itself is found as well
	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))
1346
	# register the builder plus a matching triggerable scheduler; the
	# force scheduler's factory fires it only for selected targets
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		doStepIf = IsTargetSelected(target)
	))
1357
1358
1359 ####### STATUS TARGETS
1360
1361 # 'status' is a list of Status Targets. The results of each build will be
1362 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1363 # including web pages, email senders, and IRC bots.
1364
# web UI: only enabled when the config provides a bind address/port
if ini.has_option("phase1", "status_bind"):
	c['www'] = {
		'port': ini.get("phase1", "status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	# optional single-user authentication; the configured user is given
	# the "admins" role, which is required for any control endpoint
	if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
		status_user = ini.get("phase1", "status_user")
		status_password = ini.get("phase1", "status_password")
		c['www']['auth'] = util.UserPasswordAuth([ (status_user, status_password) ])
		c['www']['authz'] = util.Authz(
			allowRules = [ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers = [ util.RolesFromUsername(roles=["admins"], usernames=[status_user]) ]
		)
1383
c['services'] = []
# IRC notifier: active only when host, nickname and channel are all set
if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
	irc = reporters.IRC(
		ini.get("irc", "host"),
		ini.get("irc", "nickname"),
		port = ini.getint("irc", "port", fallback=6667),
		password = ini.get("irc", "password", fallback=None),
		channels = [ ini.get("irc", "channel") ],
		# only report exceptions and state transitions, not every build
		notify_events = [ 'exception', 'problem', 'recovery' ]
	)

	c['services'].append(irc)
1406
# map OpenWrt git repository URLs seen in changes to gitweb commit links
# shown in the web UI
c['revlink'] = util.RevlinkMatch([
	r'https://git.openwrt.org/openwrt/(.*).git'
	],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1411
####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

# opt out of buildbot's phone-home usage statistics
c['buildbotNetUsageData'] = None