# phase2: fix obsolete assumption about feed methods
# [buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import random
8 import subprocess
9 import configparser
10
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
13
14 from twisted.internet import defer
15 from twisted.python import log
16
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
38
39
# Record the master's pid unless a pid file is already present.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))

# Load the phase2 configuration; BUILDMASTER_CONFIG may override the path.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this buildmaster (also passed to worker scripts).
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
55
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9990
persistent = False
other_builds = 0
tree_expire = 0
git_ssh = False
git_ssh_key = None

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
    other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # SSH cloning is useless without a key
    git_ssh = False

c['workers'] = []
# architecture name -> number of concurrent builds that worker may run
max_builds = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            # a single-build worker can safely share one work directory
            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # Fix: compare this worker's slot count, not the whole dict.
                # The old "max_builds != 1" compared the dict object itself and
                # therefore always raised when shared_wd was forced on.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
113
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
# NOTE(review): worker_port may be a string when taken from ini.get() above —
# confirm buildbot accepts a string port descriptor here (it usually does).
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data: nightly janitor discards build logs
# older than three days.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
127
####### CHANGESOURCES

# Master-side state directory and helper script location.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync module where built packages are published (mandatory).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync module for mirroring downloaded source archives.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# rsync location and filename pattern of the per-target SDK archives.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository and branch the packages are built for.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign package-signing key (base64 secret key) and the comment line placed
# in the derived public key (default derives it from the branch name).
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
170
171
# find arches
#
# Clone (or update) the source tree, then ask dump-target-info.pl for the
# package architectures. Each output line presumably starts with the arch
# name followed by a "target/subtarget" spec (see arch[0]/arch[1] usage
# below) — TODO confirm against dump-target-info.pl.
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

# the dump script expects a tmp/ directory inside the tree
os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# read stdout line by line until EOF (stdout is bytes — decode each line)
while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])
192
193
# find feeds
#
# feeds collects the raw feed entries; feedbranches maps repository URL to
# the branch we track, used by the scheduler's change filter below.
feeds = []
feedbranches = dict()

# one GitPoller per feed is appended by parse_feed_entry()
c['change_source'] = []
199
def parse_feed_entry(line):
    """Register one feeds.conf line: record it in the module-level feeds /
    feedbranches tables and attach a GitPoller change source for it.

    Only git-based feed methods (src-git, src-git-full, ...) are handled;
    anything else is ignored.
    """
    fields = line.strip().split()
    if not fields[0].startswith("src-git"):
        return
    feeds.append(fields)
    # the URL may carry a ";branch" suffix; default to master
    url_spec = fields[2].strip().split(';')
    branch = url_spec[1] if len(url_spec) > 1 else 'master'
    feedbranches[url_spec[0]] = branch
    c['change_source'].append(GitPoller(
        url_spec[0],
        branch=branch,
        workdir='%s/%s.git' %(os.getcwd(), fields[1]),
        pollinterval=300))
208
# The SDK's base feed is reported by its Makefile: "make val.BASE_FEED"
# prints a single feed line.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    # Fix: Popen stdout yields bytes; parse_feed_entry performs str
    # operations (startswith/split) and its results are compared against
    # str change attributes, so decode first (the file-based calls below
    # already pass str).
    parse_feed_entry(line.decode())

# The remaining feeds come from the tree's default feed configuration.
with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parse_feed_entry(line)
219
220
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []
# Build every architecture when a change arrives, but only for commits on
# the branch tracked for that repository (see feedbranches above).
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger from the web UI: runs the 00_force_build builder, which
# fans out to the selected (or all) per-arch builders via Trigger steps.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # fixed empty codebase fields: nothing selectable besides the options
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    # consumed by IsArchitectureSelected() on each Trigger step
    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))
276
277 ####### BUILDERS
278
279 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
280 # what steps, and which workers can execute them. Note that any particular build will
281 # only take place on one worker.
282
def GetDirectorySuffix(props):
    """Return the upload directory suffix for the current build.

    For release builds (release_version like "21.02.3" or "21.02.0-rc1"
    or "21.02-SNAPSHOT") this is "-MM.mm"; otherwise the empty string.
    """
    if not props.hasProperty("release_version"):
        return ""
    match = re.match(
        r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$',
        props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
290
def GetNumJobs(props):
    """Return the make -j parallelism for this worker.

    The worker's CPU count is divided among the builds it may run
    concurrently (its own build slots plus the configured other_builds),
    plus one. Uses floor division so an int is returned — under Python 3
    the old "/" produced a float (it only rendered correctly because
    "%(jobs)d" truncates).
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
    else:
        # properties not yet known — build serially
        return 1
296
def GetCwd(props):
    """Best-effort working directory of the running build.

    Prefers the builddir property, falls back to workdir, then "/".
    """
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
304
def IsArchitectureSelected(target):
    """Build a doStepIf predicate for the per-arch Trigger steps.

    The returned callable is True unless the force-build "options"
    property selects a specific architecture other than *target*.
    """
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            # no options property at all — run the step
            return True
        # keep the exact-type check: only a plain dict carries a selection
        if type(options) is dict:
            selected_arch = options.get("architecture", "all")
            if selected_arch not in ("all", target):
                return False
        return True

    return CheckArchitectureProperty
319
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key text from a base64 secret key.

    Returns the two-line public key ("untrusted comment: ..." plus the
    base64 key material), or None if seckey is not valid base64.
    """
    try:
        blob = base64.b64decode(seckey)
    except Exception:
        # invalid or missing key material — caller skips installing a pubkey
        return None

    # Bytes 0:2, 32:40 and 72: of the secret key form the public key
    # (presumably algorithm tag + key id + public key — matches the usign
    # on-disk layout; TODO confirm against usign docs).
    pub = blob[0:2] + blob[32:40] + blob[72:]

    # Fix: b64encode returns bytes under Python 3; without .decode() the
    # generated key-build.pub contained a literal "b'...'" line.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(pub).decode("ascii"))
328
def IsSharedWorkdir(step):
    """doStepIf predicate: True when the worker declares a shared workdir."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
331
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # newest completed, non-skipped build request for this builder
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        # no finished request at all -> None (implicit return)
        return

    complete_at = completed[0]['complete_at']

    # also consult the most recently started build of this builder; its
    # completion time may be newer than any request's complete_at
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
369
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # a builder with running (or zombie "old") builds sorts last
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        # pair each builder with its newest completion timestamp
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # never-built builders get the oldest possible timestamp -> first
        if not complete_at:
            complete_at = datetime.min.replace(tzinfo=tzutc())

        # busy builders get the newest possible timestamp -> last
        if is_building(bldr):
            complete_at = datetime.max.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Renamed from "results": the old local shadowed the
    # buildbot.process.results module imported at the top of the file.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in infos]
406
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

# NOTE(review): dlLock is defined but never referenced in this file —
# confirm whether the download lock is still needed.
dlLock = locks.WorkerLock("worker_dl")

# all phase2 workers are interchangeable; every builder may use any of them
workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

# the force builder only contains Trigger steps (added in the arch loop)
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
423
# One builder per package architecture: fetch the matching SDK, update and
# install the feeds, build every package, then sign and upload the results.
for arch in arches:
    # arch[0] = architecture name, arch[1] = "target/subtarget" whose SDK
    # archive is used for this architecture
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores (consumed later by GetNumJobs)
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed (persistent workers only)
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    # sync the unpacked SDK over the (possibly persistent) sdk/ tree
    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # drop absolute host-tool symlinks that point outside /bin and /usr/bin
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # ask the SDK which release version it was built from
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        # real signing happens on the master; the worker only needs
        # placeholder files so the build system is satisfied
        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    # share one download cache across all builds of this worker
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    # use full git clones so feed packages carry complete histories
    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf to use SSH cloning",
            workdir = "build/sdk",
            command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    if git_ssh:
        # drop the SSH-patched feeds.conf so later feed operations fall
        # back to feeds.conf.default
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    # build everything; IGNORE_ERRORS keeps going past individual package
    # failures, which are harvested from the logs below
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    # record the exact feed revisions used for this build
    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # sign the generated Packages indexes on the master, round-tripping a
    # tarball worker -> master -> worker
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # create the remote directory for this arch, then upload the packages
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # collect and upload logs of failed package builds
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        # mirror any source archives fetched during this build (i.e. files
        # newer than the SDK archive) to the shared source server
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))

    # informational disk usage reports; always run, never affect the result
    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    # a Triggerable scheduler per arch lets the force builder fan out
    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))
801
####### STATUS TARGETS

# Web UI configuration. Enabled only when a bind address/port is given;
# optional basic auth restricts control endpoints to the configured admin.

if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        # only the status user (role "admins") may use control endpoints
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )
826
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# opt out of buildbot's phone-home usage reporting
c['buildbotNetUsageData'] = None