phase2: remove unused tree_expire option and steps
phase2/master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import sys
import base64
import subprocess
import configparser

from dateutil.tz import tzutc
from datetime import datetime, timedelta

from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.data import resultspec
from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Property
from buildbot.process.properties import Interpolate
from buildbot.process import properties
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.shell import ShellCommand
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker


if not os.path.exists("twistd.pid"):
	with open("twistd.pid", "w") as pidfile:
		pidfile.write("{}".format(os.getpid()))

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
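# config.ini is expected to provide the sections read throughout this file
# ([phase2], [general], [rsync], [repo], [usign], [gpg] plus per-worker
# sections); an illustrative fragment (values are examples, not defaults):
#
#   [phase2]
#   buildbot_url = https://buildbot.example.org/
#   port = 9990
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#   branch = master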

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized build workers. Each element
# is a Worker object, specifying a unique worker name and password. The same
# name and password must be configured on the worker side.

worker_port = 9990
persistent = False
git_ssh = False
git_ssh_key = None

if ini.has_option("phase2", "port"):
	worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
	persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")
else:
	git_ssh = False

c['workers'] = []
max_builds = dict()

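# workers are declared in per-worker config.ini sections, e.g. (illustrative):
#
#   [worker 1]
#   phase = 2
#   name = worker-01
#   password = secret
#   builds = 3
#
# "builds" caps concurrent builds on that worker; "shared_wd" forces the
# shared work directory mode (which requires builds = 1)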
for section in ini.sections():
	if section.startswith("worker "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			sl_props = { 'shared_wd': False }
			max_builds[name] = 1

			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")

			if max_builds[name] == 1:
				sl_props['shared_wd'] = True

			if ini.has_option(section, "shared_wd"):
				sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
				# compare this worker's build count, not the whole dict
				if sl_props['shared_wd'] and (max_builds[name] != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')

			c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))

# 'protocols' defines the TCP port to listen on for connections from workers.
# This must match the master address and port configured on the build workers
# (their --master option).
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce build requests
c['collapseRequests'] = True

# Reduce amount of backlog data
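# (the JanitorConfigurator purges build logs older than logHorizon, running
# daily at the given hour; here: drop logs after three days, run at 06:00)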
c['configurators'] = [util.JanitorConfigurator(
	logHorizon=timedelta(days=3),
	hour=6,
)]

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

rsync_defopts = ["-4", "-v", "--timeout=120"]

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")


# find arches
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
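# each line printed by dump-target-info.pl is expected to look roughly like
#   <archname> <target/subtarget> [<target/subtarget> ...]
# (e.g. "aarch64_cortex-a53 bcm27xx/bcm2710"); at[0] is the package
# architecture and at[1] a representative target whose SDK is fetched below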
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
	line = findarches.stdout.readline()
	if not line:
		break
	at = line.decode().strip().split()
	arches.append(at)
	archnames.append(at[0])


# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []

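# a feed entry is a whitespace-separated line of the form
#   src-git <name> <url>[;<branch>]
# e.g. "src-git packages https://git.openwrt.org/feed/packages.git;master";
# only src-git feeds get a GitPoller attached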
def parse_feed_entry(line):
	parts = line.strip().split()
	if parts[0].startswith("src-git"):
		feeds.append(parts)
		url = parts[2].strip().split(';')
		branch = url[1] if len(url) > 1 else 'master'
		feedbranches[url[0]] = branch
		c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
	parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
	for line in f:
		parse_feed_entry(line)

if len(c['change_source']) == 0:
	log.err("FATAL ERROR: no change_sources defined, aborting!")
	sys.exit(-1)

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes.
# Here a change on a polled feed branch kicks off a build on every architecture builder.

c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
	name = "all",
	change_filter = filter.ChangeFilter(
		filter_fn = lambda change: change.branch == feedbranches[change.repository]
	),
	treeStableTimer = 60,
	builderNames = archnames))

c['schedulers'].append(ForceScheduler(
	name = "force",
	buttonName = "Force builds",
	label = "Force build details",
	builderNames = [ "00_force_build" ],

	codebases = [
		util.CodebaseParameter(
			"",
			label = "Repository",
			branch = util.FixedParameter(name = "branch", default = ""),
			revision = util.FixedParameter(name = "revision", default = ""),
			repository = util.FixedParameter(name = "repository", default = ""),
			project = util.FixedParameter(name = "project", default = "")
		)
	],

	reason = util.StringParameter(
		name = "reason",
		label = "Reason",
		default = "Trigger build",
		required = True,
		size = 80
	),

	properties = [
		util.NestedParameter(
			name="options",
			label="Build Options",
			layout="vertical",
			fields=[
				util.ChoiceStringParameter(
					name = "architecture",
					label = "Build architecture",
					default = "all",
					choices = [ "all" ] + archnames
				)
			]
		)
	]
))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

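# GetDirectorySuffix maps the SDK's release_version property onto the
# directory suffix used on the download server: e.g. "21.02.3" and
# "21.02-SNAPSHOT" both yield "-21.02", while master snapshots (which carry
# no matching release_version) yield "".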
@properties.renderer
def GetDirectorySuffix(props):
	verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if props.hasProperty("release_version"):
		m = verpat.match(props["release_version"])
		if m is not None:
			return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
	return ""

@properties.renderer
def GetNumJobs(props):
	if props.hasProperty("workername") and props.hasProperty("nproc"):
		# integer division with a floor of 1: "make -j" needs a positive integer
		return str(max(1, int(props["nproc"]) // max_builds[props["workername"]]))
	else:
		return "1"

@properties.renderer
def GetCwd(props):
	if props.hasProperty("builddir"):
		return props["builddir"]
	elif props.hasProperty("workdir"):
		return props["workdir"]
	else:
		return "/"

def IsArchitectureSelected(target):
	def CheckArchitectureProperty(step):
		try:
			options = step.getProperty("options")
			if type(options) is dict:
				selected_arch = options.get("architecture", "all")
				if selected_arch != "all" and selected_arch != target:
					return False
		except KeyError:
			pass

		return True

	return CheckArchitectureProperty

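# derive the usign public key from the base64-encoded secret key: in the
# decoded signify/usign secret key, bytes 0..1 hold the algorithm tag,
# bytes 32..39 the key number, and bytes 72.. the 32-byte public half of
# the Ed25519 key pair, which together form the public key blob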
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	try:
		seckey = base64.b64decode(seckey)
	except Exception:
		return None

	# b64encode() returns bytes; decode them so str.format() doesn't emit "b'...'"
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())

def IsSharedWorkdir(step):
	return bool(step.getProperty("shared_wd"))

@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
	"""Returns the complete_at of the latest completed and not SKIPPED
	build request for this builder, or None if there are no such build
	requests. We need to filter out SKIPPED requests because we're
	using collapseRequests=True, which unfortunately marks all
	previous requests as complete when a new buildset is created.

	@returns: datetime instance or None, via Deferred
	"""

	bldrid = yield bldr.getBuilderId()
	completed = yield bldr.master.data.get(
		('builders', bldrid, 'buildrequests'),
		[
			resultspec.Filter('complete', 'eq', [True]),
			resultspec.Filter('results', 'ne', [results.SKIPPED]),
		],
		order=['-complete_at'], limit=1)
	if not completed:
		return

	complete_at = completed[0]['complete_at']

	last_build = yield bldr.master.data.get(
		('builds', ),
		[
			resultspec.Filter('builderid', 'eq', [bldrid]),
		],
		order=['-started_at'], limit=1)

	if last_build and last_build[0]:
		last_complete_at = last_build[0]['complete_at']
		if last_complete_at and (last_complete_at > complete_at):
			return last_complete_at

	return complete_at

@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
	"""Returns the list of builders sorted by the timestamp of their last
	completed and not skipped build, so the least recently built
	architecture is scheduled first.

	@returns: list of sorted builders
	"""

	def is_building(bldr):
		return bool(bldr.building) or bool(bldr.old_building)

	def bldr_info(bldr):
		d = defer.maybeDeferred(getNewestCompleteTime, bldr)
		d.addCallback(lambda complete_at: (complete_at, bldr))
		return d

	def bldr_sort(item):
		(complete_at, bldr) = item

		# never-built builders sort first, currently building ones last
		if not complete_at:
			date = datetime.min
			complete_at = date.replace(tzinfo=tzutc())

		if is_building(bldr):
			date = datetime.max
			complete_at = date.replace(tzinfo=tzutc())

		return (complete_at, bldr.name)

	# named 'infos' to avoid shadowing the buildbot.process.results import
	infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
	infos.sort(key=bldr_sort)

	for r in infos:
		log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

	return [r[1] for r in infos]

c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
	workerNames.append(worker.workername)

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
	name = "00_force_build",
	workernames = workerNames,
	factory = force_factory))

for arch in arches:
	ts = arch[1].split('/')

	factory = BuildFactory()

	# setup shared work directory if required
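	# (on workers with max_builds == 1 the per-builder directory is replaced
	# by a symlink to ../shared-workdir, so all arch builders on that worker
	# share a single SDK checkout instead of keeping one per builder)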
	factory.addStep(ShellCommand(
		name = "sharedwd",
		description = "Setting up shared work directory",
		command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsSharedWorkdir))

	# find number of cores
	factory.addStep(SetPropertyFromCommand(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# prepare workspace
	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/cleanup.sh',
		workerdest = "../cleanup.sh",
		mode = 0o755))

	if not persistent:
		factory.addStep(ShellCommand(
			name = "cleanold",
			description = "Cleaning previous builds",
			command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

		factory.addStep(ShellCommand(
			name = "cleanup",
			description = "Cleaning work area",
			command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "-p", "sdk"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "updatesdk",
		description = "Updating SDK",
		command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "cleancmdlinks",
		description = "Sanitizing host command symlinks",
		command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
		haltOnFailure = True))

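	# ask the SDK itself for its release version: the helper makefile below
	# simply prints $(VERSION_NUMBER), e.g. "21.02.3" (illustrative value)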
	factory.addStep(StringDownload(
		name = "writeversionmk",
		s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
		workerdest = "sdk/getversion.mk",
		mode = 0o755))

	factory.addStep(SetPropertyFromCommand(
		name = "getversion",
		property = "release_version",
		description = "Finding SDK release version",
		workdir = "build/sdk",
		command = ["make", "-f", "getversion.mk"]))

	# install build key
	if usign_key is not None:
		factory.addStep(StringDownload(
			name = "dlkeybuildpub",
			s = UsignSec2Pub(usign_key, usign_comment),
			workerdest = "sdk/key-build.pub",
			mode = 0o600))

		factory.addStep(StringDownload(
			name = "dlkeybuild",
			s = "# fake private key",
			workerdest = "sdk/key-build",
			mode = 0o600))

		factory.addStep(StringDownload(
			name = "dlkeybuilducert",
			s = "# fake certificate",
			workerdest = "sdk/key-build.ucert",
			mode = 0o600))

	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/ccache.sh',
		workerdest = 'sdk/ccache.sh',
		mode = 0o755))

	factory.addStep(ShellCommand(
		name = "prepccache",
		description = "Preparing ccache",
		workdir = "build/sdk",
		command = ["./ccache.sh"],
		haltOnFailure = True))

	if git_ssh:
		factory.addStep(StringDownload(
			name = "dlgitclonekey",
			s = git_ssh_key,
			workerdest = "../git-clone.key",
			mode = 0o600))

		factory.addStep(ShellCommand(
			name = "patchfeedsconf",
			description = "Patching feeds.conf",
			workdir = "build/sdk",
			command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update", "-f"],
		env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
		haltOnFailure = True))

	if git_ssh:
		factory.addStep(ShellCommand(
			name = "rmfeedsconf",
			description = "Removing feeds.conf",
			workdir = "build/sdk",
			command=["rm", "feeds.conf"],
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "logclear",
		description = "Clearing failure logs",
		workdir = "build/sdk",
		command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = True,
	))

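	# IGNORE_ERRORS="n m y" lets the build continue past individual package
	# failures; BUILD_LOG=1 stores per-package logs under logs/, which the
	# logfind/logcollect steps below mine for error reports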
	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		timeout = 3600,
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
		env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
		haltOnFailure = True))

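	# "./scripts/feeds list -s -f" prints the feed list pinned to the exact
	# commits that were just built, so the published feeds.conf can be used
	# to reproduce this package set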
	factory.addStep(ShellCommand(
		name = "mkfeedsconf",
		description = "Generating pinned feeds.conf",
		workdir = "build/sdk",
		command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

	factory.addStep(ShellCommand(
		name = "checksums",
		description = "Calculating checksums",
		descriptionDone="Checksums calculated",
		workdir = "build/sdk",
		# backslashes are doubled for Python so the shell sees \( ... \1 literally
		command = "cd bin/packages/%s; " %(arch[0]) + "find . -type f -not -name 'sha256sums' -printf \"%P\\n\" | sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | sed -ne 's!^\\(.*\\) \\(.*\\)$!\\1 *\\2!p' > sha256sums",
		haltOnFailure = True
	))

	if ini.has_option("gpg", "key") or usign_key is not None:
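		# signing round-trip: pack the Packages indexes on the worker, upload
		# them to the master, sign them there via scripts/signall.sh, then
		# download and unpack the signed result over the build output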
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			workersrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			workerdest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

	# download the remote sha256sums for this arch to 'arch-sha256sums'
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for arch",
		command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	factory.addStep(FileDownload(
		name="dlrsync.sh",
		mastersrc = scripts_dir + "/rsync.sh",
		workerdest = "../rsync.sh",
		mode = 0o755
	))

	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = "sha2rsync.pl",
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

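	# sha2rsync.pl diffs the remote sha256sums fetched above against the
	# locally calculated ones and writes only the changed paths to
	# "rsynclist", keeping the upload incremental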
	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		workdir = "build/sdk",
		command = ["../../../sha2rsync.pl", "../../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
		haltOnFailure = True,
	))

	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "packageprune",
		description = "Pruning package files",
		workdir = "build/sdk",
		command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = True,
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = True,
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = True,
		logEnviron = False
	))

	if rsync_src_url is not None:
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			workdir = "build/sdk",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
				Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			flunkOnFailure = False,
			warnOnFailure = True,
			logEnviron = False
		))

	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "du",
		description = "Reporting estimated file space usage",
		command=["du", "-sh", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))

	c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % arch[0],
		description = "Triggering %s build" % arch[0],
		schedulerNames = [ "trigger_%s" % arch[0] ],
		set_properties = { "reason": Property("reason") },
		doStepIf = IsArchitectureSelected(arch[0])
	))

####### STATUS TARGETS

# 'status' is the set of status targets. The results of each build are
# published through the 'www' dictionary configured below (waterfall,
# console and grid views).

if ini.has_option("phase2", "status_bind"):
	c['www'] = {
		'port': ini.get("phase2", "status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
		c['www']['auth'] = util.UserPasswordAuth([
			(ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
		])
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
		)

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot installation's
# home page (linked to the 'titleURL') and is embedded in the title bar of
# the web UI.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server is visible: typically the port configured in the
# status section above, but with an externally-visible host name which the
# buildbot cannot figure out without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

c['buildbotNetUsageData'] = None