phase1,phase2: s/master/main for phase{1,2}
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from buildbot import locks
11 from buildbot.changes import filter
12 from buildbot.changes.gitpoller import GitPoller
13 from buildbot.config import BuilderConfig
14 from buildbot.plugins import schedulers
15 from buildbot.plugins import steps
16 from buildbot.plugins import util
17 from buildbot.process.factory import BuildFactory
18 from buildbot.process.properties import Property
19 from buildbot.process.properties import WithProperties
20 from buildbot.schedulers.basic import SingleBranchScheduler
21 from buildbot.schedulers.forcesched import ForceScheduler
22 from buildbot.steps.master import MasterShellCommand
23 from buildbot.steps.shell import SetProperty
24 from buildbot.steps.shell import ShellCommand
25 from buildbot.steps.transfer import FileDownload
26 from buildbot.steps.transfer import FileUpload
27 from buildbot.steps.transfer import StringDownload
28 from buildbot.worker import Worker
29
30
# Load the shared buildmaster configuration; BUILDMASTER_CONFIG may point
# at an alternate ini file, otherwise ./config.ini is used.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this (phase 2) buildmaster, required.
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
42
####### WORKERS

# The 'workers' list defines the set of recognized workers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.
48
# Phase 2 tuning knobs; every option is optional and falls back to the
# defaults given here when absent from the ini file.
slave_port = ini.get("phase2", "port", fallback=9990)
persistent = ini.getboolean("phase2", "persistent", fallback=False)
other_builds = ini.getint("phase2", "other_builds", fallback=0)
tree_expire = ini.getint("phase2", "expire", fallback=0)

# Git-over-SSH feed access only makes sense when a key is configured too.
git_ssh = ini.getboolean("general", "git_ssh", fallback=False)
git_ssh_key = ini.get("general", "git_ssh_key", fallback=None)

if git_ssh_key is None:
	git_ssh = False
75
c['workers'] = []
# Per-worker maximum of concurrent builds, keyed by worker name.
max_builds = dict()

# One "slave <id>" ini section per worker; only phase 2 workers are used.
for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			sl_props = { 'shared_wd': False }
			max_builds[name] = 1

			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")

			# A worker running only one build at a time can safely share
			# one work directory between all its builders.
			if max_builds[name] == 1:
				sl_props['shared_wd'] = True

			if ini.has_option(section, "shared_wd"):
				sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
				# Bug fix: compare this worker's build count, not the
				# max_builds dict itself (a dict is always != 1, so the
				# original raised for every explicit shared_wd=yes).
				if sl_props['shared_wd'] and (max_builds[name] != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')

			c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
100
# TCP port to listen on for connections from workers. This must match the
# port configured on the workers themselves (their --master option).
c['protocols'] = {'pb': {'port': slave_port}}

# coalesce builds: merge queued requests for the same builder
c['collapseRequests'] = True

# Reduce amount of backlog data kept per builder
c['buildHorizon'] = 30
c['logHorizon'] = 20
112
####### CHANGESOURCES

# Master-side working directory and helper script location.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync target for built packages and failure logs (required).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync target for mirroring downloaded source archives.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

# rsync source the per-target SDK archive is fetched from.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository and branch to track.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

# usign package signing key (base64 secret key) and the comment line used
# when deriving the public key from it (see UsignSec2Pub below).
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")
155
156
# Discover the package architectures from a local checkout of the source
# tree, using the dumpinfo.pl helper script.
arches = []
archnames = []

src_dir = work_dir + '/source.git'

if not os.path.isdir(src_dir):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, src_dir])
else:
	subprocess.call(["git", "pull"], cwd = src_dir)

findarches = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = src_dir)

# One architecture per line; the first field is the architecture name,
# the second (used later) looks like "target/subtarget".
for rawline in findarches.stdout:
	fields = rawline.decode().strip().split()
	arches.append(fields)
	archnames.append(fields[0])
176
177
# find feeds
feeds = []
# Maps feed repository URL -> branch name; used by the change filter.
feedbranches = dict()

# Populated by parse_feed_entry() with one GitPoller per src-git feed.
c['change_source'] = []
183
def parse_feed_entry(line):
	"""Register one feeds.conf entry.

	src-git feeds are recorded in the module-level `feeds` and
	`feedbranches` tables and get a GitPoller change source; every other
	line (comments, blank lines, non-git feed types) is ignored.
	"""
	parts = line.strip().split()
	# Guard against blank lines: parts[0] on an empty list would raise
	# IndexError and abort the whole config read.
	if parts and parts[0] == "src-git":
		feeds.append(parts)
		# A feed URL may carry its branch after a semicolon, e.g.
		# "src-git name https://host/repo.git;branch".
		url = parts[2].strip().split(';')
		branch = url[1] if len(url) > 1 else 'master'
		feedbranches[url[0]] = branch
		c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
192
# Ask the SDK makefile for the base feed definition first ...
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
	# Bug fix: a Popen pipe yields bytes under Python 3, so the
	# 'parts[0] == "src-git"' comparison in parse_feed_entry could never
	# match. Decode to str before parsing.
	parse_feed_entry(line.decode())

# ... then register every feed from the default feeds list (iterating a
# text-mode file already yields str lines).
with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parse_feed_entry(line)
203
204
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []

# Rebuild every architecture when a change lands on the branch we track
# for that change's feed repository (see feedbranches above).
c['schedulers'].append(SingleBranchScheduler(
	name = "all",
	change_filter = filter.ChangeFilter(
		filter_fn = lambda change: change.branch == feedbranches[change.repository]
	),
	treeStableTimer = 60,
	builderNames = archnames))

# Manual trigger: builds either one selected architecture or all of them,
# via the 00_force_build builder which fans out per-arch triggers.
c['schedulers'].append(ForceScheduler(
	name = "force",
	buttonName = "Force builds",
	label = "Force build details",
	builderNames = [ "00_force_build" ],

	# No user-editable codebase fields; everything is fixed/empty.
	codebases = [
		util.CodebaseParameter(
			"",
			label = "Repository",
			branch = util.FixedParameter(name = "branch", default = ""),
			revision = util.FixedParameter(name = "revision", default = ""),
			repository = util.FixedParameter(name = "repository", default = ""),
			project = util.FixedParameter(name = "project", default = "")
		)
	],

	reason = util.StringParameter(
		name = "reason",
		label = "Reason",
		default = "Trigger build",
		required = True,
		size = 80
	),

	# The "options.architecture" property is consulted by
	# IsArchitectureSelected() on each per-arch trigger step.
	properties = [
		util.NestedParameter(
			name="options",
			label="Build Options",
			layout="vertical",
			fields=[
				util.ChoiceStringParameter(
					name = "architecture",
					label = "Build architecture",
					default = "all",
					choices = [ "all" ] + archnames
				)
			]
		)
	]
))
260
####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.
266
def GetDirectorySuffix(props):
	"""Return a "-MM.mm" suffix for release builds, or "" otherwise.

	The suffix is derived from the "release_version" build property,
	which must look like "21.02.3", "21.02.0-rc1" or "21.02-SNAPSHOT".
	"""
	pattern = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if not props.hasProperty("release_version"):
		return ""
	match = pattern.match(props["release_version"])
	if match is None:
		return ""
	return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
274
def GetNumJobs(props):
	"""Number of make jobs to use on this worker.

	Splits the worker's CPU count across the builds that may run there in
	parallel (its own max_builds plus the configured other_builds), with
	a +1 bias. Falls back to 1 when the properties are not yet known.
	"""
	if props.hasProperty("workername") and props.hasProperty("nproc"):
		# Bug fix: use floor division. Under Python 3, "/" yields a
		# float here, which only worked by accident through the
		# "-j%(jobs)d" format truncation.
		return ((int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1)
	else:
		return 1
280
def GetCwd(props):
	"""Best-effort working directory of the running build.

	Prefers the "builddir" property, then "workdir", then "/" so the
	result is always usable in command lines.
	"""
	for prop in ("builddir", "workdir"):
		if props.hasProperty(prop):
			return props[prop]
	return "/"
288
def IsArchitectureSelected(target):
	"""Build a doStepIf predicate for the per-arch trigger steps.

	The returned callable is False only when the force-build options
	explicitly name a different architecture than *target*; a missing or
	malformed "options" property means the step runs.
	"""
	def _check(step):
		try:
			opts = step.getProperty("options")
		except KeyError:
			return True
		if type(opts) is dict:
			chosen = opts.get("architecture", "all")
			return chosen == "all" or chosen == target
		return True

	return _check
303
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public key file content from a base64 secret key.

	Returns the two-line public key text (comment line with "secret key"
	rewritten to "public key", then the base64 key material extracted at
	fixed offsets from the secret blob — presumably tag/key-id/key bytes
	per usign's on-disk layout), or None if the input does not decode.
	"""
	try:
		seckey = base64.b64decode(seckey)
	except (TypeError, ValueError):
		# binascii.Error (bad base64) is a ValueError subclass; TypeError
		# covers non-string/bytes input. Anything else should propagate.
		return None

	# Bug fix: under Python 3 b64encode() returns bytes, which the format
	# call would render as "b'...'" inside the key file; decode to str.
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
312
def IsSharedWorkdir(step):
	"""doStepIf helper: does this worker use a shared work directory?"""
	shared = step.getProperty("shared_wd")
	return bool(shared)
315
316
c['builders'] = []

# Lock available to throttle concurrent downloads per worker.
dlLock = locks.WorkerLock("slave_dl")

# All registered phase 2 workers may run any builder.
slaveNames = [worker.workername for worker in c['workers']]

# The force-build builder only fans out per-arch trigger steps, which are
# appended to this factory inside the arch loop below.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
	name = "00_force_build",
	workernames = slaveNames,
	factory = force_factory))
332
# One builder per package architecture; each runs the full SDK pipeline.
for arch in arches:
	# arch[0] is the architecture name, arch[1] presumably
	# "target/subtarget" as printed by dumpinfo.pl — split for the SDK
	# download path below.
	ts = arch[1].split('/')

	factory = BuildFactory()

	# setup shared work directory if required
	factory.addStep(ShellCommand(
		name = "sharedwd",
		description = "Setting up shared work directory",
		command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsSharedWorkdir))

	# find number of cores (feeds GetNumJobs via the "nproc" property)
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))
353
	# prepare workspace
	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/cleanup.sh',
		workerdest = "../cleanup.sh",
		mode = 0o755))

	if not persistent:
		# Non-persistent workers: wipe all previous build state first ...
		factory.addStep(ShellCommand(
			name = "cleanold",
			description = "Cleaning previous builds",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

		# ... then clean just this builder's work area.
		factory.addStep(ShellCommand(
			name = "cleanup",
			description = "Cleaning work area",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	# expire tree if needed (persistent workers only)
	elif tree_expire > 0:
		factory.addStep(FileDownload(
			mastersrc = scripts_dir + '/expire.sh',
			workerdest = "../expire.sh",
			mode = 0o755))

		factory.addStep(ShellCommand(
			name = "expire",
			description = "Checking for build tree expiry",
			command = ["./expire.sh", str(tree_expire)],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))
391
	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "-p", "sdk"],
		haltOnFailure = True))

	# Fetch the SDK archive for this target/subtarget via rsync.
	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
		haltOnFailure = True))

	# --checksum sync keeps unchanged files untouched in the existing sdk/.
	factory.addStep(ShellCommand(
		name = "updatesdk",
		description = "Updating SDK",
		command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
		haltOnFailure = True))

	# Drop host-tool symlinks that point at absolute paths other than
	# /bin or /usr/bin (stale references to the SDK build host).
	factory.addStep(ShellCommand(
		name = "cleancmdlinks",
		description = "Sanitizing host command symlinks",
		command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
		haltOnFailure = True))

	# Helper makefile that prints the SDK's VERSION_NUMBER.
	factory.addStep(StringDownload(
		name = "writeversionmk",
		s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
		workerdest = "sdk/getversion.mk",
		mode = 0o755))

	# Feeds GetDirectorySuffix via the "release_version" property.
	factory.addStep(SetProperty(
		name = "getversion",
		property = "release_version",
		description = "Finding SDK release version",
		workdir = "build/sdk",
		command = ["make", "-f", "getversion.mk"]))
436
	# install build key
	if usign_key is not None:
		factory.addStep(StringDownload(
			name = "dlkeybuildpub",
			s = UsignSec2Pub(usign_key, usign_comment),
			workerdest = "sdk/key-build.pub",
			mode = 0o600))

		# Only the public key is real; the private key and certificate are
		# placeholders (actual signing happens on the master, see below).
		factory.addStep(StringDownload(
			name = "dlkeybuild",
			s = "# fake private key",
			workerdest = "sdk/key-build",
			mode = 0o600))

		factory.addStep(StringDownload(
			name = "dlkeybuilducert",
			s = "# fake certificate",
			workerdest = "sdk/key-build.ucert",
			mode = 0o600))
456
	# Share one $HOME/dl download cache across all builders on the worker.
	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/ccache.sh',
		workerdest = 'sdk/ccache.sh',
		mode = 0o755))

	factory.addStep(ShellCommand(
		name = "prepccache",
		description = "Preparing ccache",
		workdir = "build/sdk",
		command = ["./ccache.sh"],
		haltOnFailure = True))
480
	# Optionally fetch feeds over authenticated SSH instead of HTTPS.
	if git_ssh:
		factory.addStep(StringDownload(
			name = "dlgitclonekey",
			s = git_ssh_key,
			workerdest = "../git-clone.key",
			mode = 0o600))

		# Rewrite feed URLs from https:// to ssh://git@ in a temporary
		# feeds.conf (removed again after the update step).
		factory.addStep(ShellCommand(
			name = "patchfeedsconf",
			description = "Patching feeds.conf",
			workdir = "build/sdk",
			command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update", "-f"],
		env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
		haltOnFailure = True))

	if git_ssh:
		factory.addStep(ShellCommand(
			name = "rmfeedsconf",
			description = "Removing feeds.conf",
			workdir = "build/sdk",
			command=["rm", "feeds.conf"],
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"],
		haltOnFailure = True))

	# Start from a clean failure-log state for this run.
	factory.addStep(ShellCommand(
		name = "logclear",
		description = "Clearing failure logs",
		workdir = "build/sdk",
		command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
		haltOnFailure = False
	))
525
	# Build all packages: keep going on per-package failures
	# (IGNORE_ERRORS) and write per-package logs (BUILD_LOG=1) that the
	# log collection steps below pick up.
	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		timeout = 3600,
		command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
		env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
		haltOnFailure = True))

	# Record the exact feed revisions used for this package set.
	factory.addStep(ShellCommand(
		name = "mkfeedsconf",
		description = "Generating pinned feeds.conf",
		workdir = "build/sdk",
		command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
540
	# Sign the generated Packages index files on the master: pack them,
	# upload to the master, sign there (signall.sh), and fetch the signed
	# archive back to the worker.
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			workersrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			workerdest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
584
	# Create the per-arch directory on the download server (include/exclude
	# pattern syncs only the empty directory skeleton) ...
	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	# ... then upload the packages themselves.
	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	# Same two-step dance for the failure logs directory.
	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	# Collect the log files of every package that reported an ERROR.
	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
	))
640
	# Mirror freshly downloaded source archives, if a target is configured.
	if rsync_src_url is not None:
		# Only upload files fetched during this run, i.e. newer than the
		# SDK archive we started from.
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			workdir = "build/sdk",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
				WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	# Always report remaining disk space, even after failures.
	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		alwaysRun = True
	))
669
	c['builders'].append(BuilderConfig(name=arch[0], workernames=slaveNames, factory=factory))

	# Per-arch triggerable scheduler, fired from the force-build factory
	# when the selected architecture matches (or is "all").
	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % arch[0],
		description = "Triggering %s build" % arch[0],
		schedulerNames = [ "trigger_%s" % arch[0] ],
		set_properties = { "reason": Property("reason") },
		doStepIf = IsArchitectureSelected(arch[0])
	))
680
####### STATUS TARGETS

# The results of each build are pushed to status targets; here that is the
# buildbot web UI (c['www']), optionally protected by basic auth with a
# single admin user.

if ini.has_option("phase2", "status_bind"):
	c['www'] = {
		'port': ini.get("phase2", "status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
		c['www']['auth'] = util.UserPasswordAuth([
			(ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
		])
		# Only the configured user (role "admins") may use control
		# endpoints such as force/stop build.
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
		)
705
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

# Opt out of buildbot's anonymous usage reporting.
c['buildbotNetUsageData'] = None