e19a7314cc67267d922c81040e09aa146f47f754
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import sys
7 import base64
8 import subprocess
9 import configparser
10
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
13
14 from twisted.internet import defer
15 from twisted.python import log
16
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
38
39
# Drop a pidfile on first start so standard twistd tooling can locate this
# process even when the master was launched by other means.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

# All runtime settings come from an ini file; its path may be overridden
# through the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Defaults; each may be overridden by the corresponding ini option below.
worker_port = 9990       # TCP port workers connect to
persistent = False       # keep build trees between builds
other_builds = 0         # unrelated builds sharing the worker's CPUs
tree_expire = 0          # seconds before a persistent tree is discarded (0 = never)
git_ssh = False          # clone feeds over SSH instead of https
git_ssh_key = None       # private key used when git_ssh is enabled

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
    other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # SSH cloning is useless without a key; force it off.
    git_ssh = False
# Register one Worker per "worker" ini section that declares phase 2.
# max_builds records each worker's build concurrency so GetNumJobs() can
# divide the CPU count fairly between parallel builds.
c['workers'] = []
max_builds = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            # A single-build worker can safely share one work directory.
            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # BUG FIX: compare this worker's build count, not the whole
                # max_builds dict (a dict never equals 1, so the previous
                # check raised for every worker with shared_wd set, even
                # when builds was 1).
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds: merge queued requests for the same builder into one build
c['collapseRequests'] = True

# Reduce amount of backlog data: drop build logs older than three days,
# running the janitor daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
127
####### CHANGESOURCES

# Master-side working directory and the helper-scripts checkout next to it.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync destination for built packages (mandatory).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for source tarballs downloaded during builds.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Where to fetch the per-target SDK archives from, and the glob used to
# pick the archive file.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository and branch the SDK/feeds metadata is read from.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign signing key (base64 secret key) and the comment embedded in the
# derived public key, e.g. "untrusted comment: Openwrt 21.02 key".
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")


# find arches: clone (or update) the source tree on the master and ask its
# dump-target-info.pl helper for the list of package architectures.
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Each output line is "<archname> <target/subtarget> ..."; keep the full
# record in arches and just the name in archnames.
while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])


# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []
199
def parse_feed_entry(line):
    """Register one feeds.conf line.

    For "src-git*" entries, remember the raw fields in the global feeds
    list, record the feed's branch in feedbranches, and attach a GitPoller
    change source watching that repository/branch.
    """
    fields = line.strip().split()
    if not fields[0].startswith("src-git"):
        return
    feeds.append(fields)
    # The URL field may carry a branch suffix: "<url>[;<branch>]".
    repo_spec = fields[2].strip().split(';')
    feed_branch = repo_spec[1] if len(repo_spec) > 1 else 'master'
    feedbranches[repo_spec[0]] = feed_branch
    c['change_source'].append(GitPoller(
        repo_spec[0],
        branch=feed_branch,
        workdir='%s/%s.git' %(os.getcwd(), fields[1]),
        pollinterval=300))
208
# The base feed (the source tree itself) is reported by the SDK makefile's
# val.BASE_FEED target; only the first output line is meaningful.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

# Then register every feed listed in feeds.conf.default.
with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

# Without at least one change source the master would never build anything;
# treat that as a fatal misconfiguration.
if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    sys.exit(-1)
223
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []
# Build all architectures whenever a watched feed repository changes on the
# branch we track for it (see feedbranches above).
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger via the web UI; runs the 00_force_build builder, which in
# turn triggers the per-architecture builders (see the loop further down).
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # No repository/branch selection is offered — all fields fixed empty.
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    # "architecture" is consumed by IsArchitectureSelected() to decide which
    # trigger steps actually fire.
    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))
285
def GetDirectorySuffix(props):
    """Return the "-YY.MM" upload-directory suffix for release builds.

    Parses the build's release_version property ("YY.MM.patch[-rcN]" or
    "YY.MM-SNAPSHOT"); returns "" when the property is absent or does not
    match that scheme.
    """
    release_pattern = re.compile(
        r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if not props.hasProperty("release_version"):
        return ""
    match = release_pattern.match(props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
293
def GetNumJobs(props):
    """Compute the make -j level for a build.

    Splits the worker's CPU count (nproc property) between the builds the
    worker may run concurrently (max_builds) plus any unrelated builds
    configured via other_builds, then adds one.  Returns 1 when the
    workername or nproc property is missing.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # BUG FIX: "/" is float division under Python 3, so this returned a
        # float (only masked by the "-j%(jobs)d" rendering truncating it).
        # Integer division yields the same truncated value as a proper int.
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
    else:
        return 1
299
def GetCwd(props):
    """Best-effort working directory of the running build.

    Prefers the builddir property, then workdir, falling back to "/" when
    neither is set.
    """
    for candidate in ("builddir", "workdir"):
        if props.hasProperty(candidate):
            return props[candidate]
    return "/"
307
def IsArchitectureSelected(target):
    """Build a doStepIf predicate for the force scheduler's trigger steps.

    The returned callback lets the step run unless the forced build's
    "options" property names a specific architecture other than *target*.
    Missing or malformed options mean "run everything".
    """
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        # Only an exact dict carries the nested force-build parameters.
        if type(options) is dict:
            chosen = options.get("architecture", "all")
            if chosen not in ("all", target):
                return False
        return True

    return CheckArchitectureProperty
322
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key text from a base64-encoded secret key.

    Returns the two-line public key (the comment line with a trailing
    "secret key" rewritten to "public key", then the base64 key material),
    or None when *seckey* is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # Slice offsets follow the usign/signify secret-key layout: the
    # algorithm tag, the key id and the public-key bytes — presumably; see
    # the signify key format docs to confirm.
    # BUG FIX: b64encode() returns bytes; decode it so the generated
    # key-build.pub does not contain a literal "b'...'" repr.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
331
def IsSharedWorkdir(step):
    """doStepIf helper: True when the worker advertises a shared work directory."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
334
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        # No usable request at all -> None (implicit return value).
        return

    complete_at = completed[0]['complete_at']

    # Also consult the most recently *started* build; its completion time
    # may be newer than the request's (e.g. requests collapsed away).
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
372
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # True when the builder currently has running (or stale) builds.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # Pair each builder with its newest completion timestamp.
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders sort first (oldest possible timestamp)...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ...and busy builders sort last, so idle builders with the stalest
        # results get dispatched first.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Renamed from 'results' to avoid shadowing the imported
    # buildbot.process.results module.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for r in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in infos]
409
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

# Worker-scoped lock (declared but not attached to any step in this file).
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

# The force builder only holds trigger steps, one per architecture; they
# are appended by the per-arch loop below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
426
# One builder (plus a triggerable scheduler and a force-trigger step) per
# package architecture.  Each build downloads the matching SDK, builds all
# feed packages inside it, optionally signs the results on the master, and
# rsyncs packages/logs/sources to the download server.
for arch in arches:
    # arch = [archname, "target/subtarget", ...]; ts selects the SDK path.
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores (stored in the "nproc" property for GetNumJobs)
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    # Non-persistent workers get a full clean of previous builds plus a
    # clean of the current work area before every build.
    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed (persistent workers only)
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # Fetch the SDK for this architecture's target and unpack it into
    # sdk/, updating an existing tree in place via rsync --checksum.
    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # Drop absolute host-tool symlinks that point outside /bin and /usr/bin
    # (they would be dangling or wrong on this worker).
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # Ask the SDK's version.mk for the release version; GetDirectorySuffix
    # uses the resulting property to pick the upload directory.
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key (public key only; private signing happens on the
    # master, so the worker gets placeholder private key/cert files)
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    # Share the download cache across builds via $HOME/dl.
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    # Use full git clones for feeds so exact revisions can be pinned.
    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    # Optionally clone feeds over SSH with a dedicated deploy key.
    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf to use SSH cloning",
            workdir = "build/sdk",
            command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    # Remove the SSH-rewritten feeds.conf so the key stops being needed.
    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    # Build every package; -j level is derived from nproc via GetNumJobs.
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    # Record the exact feed revisions used alongside the built packages.
    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # Signing round-trip: pack all Packages indexes, upload to the master,
    # sign there with signall.sh, download and unpack the signed archive.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # Upload packages for this arch only (include/exclude filters create the
    # directory level without touching sibling architectures).
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # Collect and upload per-package failure logs (best effort: failures
    # here warn but never flunk the build).
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    # Optionally mirror newly downloaded source tarballs (those newer than
    # the SDK archive) to the sources server.
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))

    # Informational disk-usage reports; always run, never affect results.
    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    # Wire this builder into the force scheduler: a Triggerable plus a
    # trigger step that only fires when this architecture was selected.
    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))
804
####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    # Optional single-user authentication; the configured user gets the
    # "admins" role, which is required for any control endpoint.
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )
829
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# Opt out of sending anonymous usage statistics to the buildbot project.
c['buildbotNetUsageData'] = None