scripts: remove migrated dumpinfo.pl script
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from buildbot import locks
11 from buildbot.changes import filter
12 from buildbot.changes.gitpoller import GitPoller
13 from buildbot.config import BuilderConfig
14 from buildbot.plugins import schedulers
15 from buildbot.plugins import steps
16 from buildbot.plugins import util
17 from buildbot.process.factory import BuildFactory
18 from buildbot.process.properties import Property
19 from buildbot.process.properties import WithProperties
20 from buildbot.schedulers.basic import SingleBranchScheduler
21 from buildbot.schedulers.forcesched import ForceScheduler
22 from buildbot.steps.master import MasterShellCommand
23 from buildbot.steps.shell import SetProperty
24 from buildbot.steps.shell import ShellCommand
25 from buildbot.steps.transfer import FileDownload
26 from buildbot.steps.transfer import FileUpload
27 from buildbot.steps.transfer import StringDownload
28 from buildbot.worker import Worker
29
30
# Load the master's ini configuration; BUILDMASTER_CONFIG may point at an
# alternate file (defaults to ./config.ini in the master basedir). A missing
# file leaves 'ini' empty and the required ini.get() calls below will raise.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this buildmaster (also passed to cleanup.sh below).
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
42
####### BUILDSLAVES

# The 'workers' list defines the set of recognized buildslaves. Each element is
# a Worker object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.

# Defaults; each may be overridden by the [phase2] / [general] ini sections below.
slave_port = 9990      # TCP port workers connect to
persistent = False     # keep build trees between builds instead of full cleanup
other_builds = 0       # builds assumed to run concurrently outside this master (feeds the -j computation)
tree_expire = 0        # max age (seconds) of a persistent tree before expire.sh wipes it; 0 disables
git_ssh = False        # rewrite feed URLs to SSH and clone with a deploy key
git_ssh_key = None     # private key material used when git_ssh is enabled

if ini.has_option("phase2", "port"):
    # NOTE(review): kept as a string via ini.get() (not getint) — buildbot
    # accepts both; confirm before "tightening" this.
    slave_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
    other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # SSH cloning is useless without a key, so force it off.
    git_ssh = False
75
c['workers'] = []
max_builds = dict()   # worker name -> allowed concurrent builds (read later by GetNumJobs)

# Register every "slave <x>" ini section whose phase is 2 as a Worker.
# A worker limited to one concurrent build gets a shared work directory by
# default; an explicit shared_wd=true with builds > 1 is a config error.
for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # Fix: compare this worker's build count, not the whole dict.
                # The old "max_builds != 1" was always True, so any explicit
                # shared_wd=true raised even with builds = 1.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
100
# 'slavePortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildslaves (with their
# --master option)
c['protocols'] = {'pb': {'port': slave_port}}

# coalesce builds: merge queued requests for the same builder into one build
c['collapseRequests'] = True

# Reduce amount of backlog data kept by the master
c['buildHorizon'] = 30
c['logHorizon'] = 20
112
####### CHANGESOURCES

# Master-side working directory (source checkout, temporary signing dir) and
# the location of the helper scripts shipped next to this config.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync destination for built packages and failure logs (required).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for downloaded source archives.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Where workers fetch the prebuilt SDK from, and the archive glob to match.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Source repository (and branch) used to enumerate architectures and feeds.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign signing key; the default comment is derived from the branch name,
# e.g. "openwrt-21.02" -> "untrusted comment: Openwrt 21.02 key".
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
156
# find arches: keep a shallow clone of the source tree so the
# dump-target-info.pl helper can enumerate the package architectures.
arches = [ ]      # full per-arch tuples as printed by the helper
archnames = [ ]   # first column only; used as builder names

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# One whitespace-separated record per line; column 0 is the arch name and
# column 1 a "target/subtarget" reference (split as ts[] in the builder loop).
while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])
176
# find feeds
feeds = []                 # parsed "src-git" entries (list of token lists)
feedbranches = dict()      # repository URL -> branch; consulted by the change filter

c['change_source'] = []
183
def parse_feed_entry(line):
    """Register one feeds.conf line: record it in 'feeds'/'feedbranches' and
    attach a GitPoller change source for its repository.

    Accepts str or bytes: the base-feed caller reads from a subprocess pipe
    and therefore passes bytes, which previously never matched the "src-git"
    str token and was silently dropped. Blank and non-"src-git" lines
    (comments, src-git-full, ...) are ignored.
    """
    if isinstance(line, bytes):
        line = line.decode()
    parts = line.strip().split()
    # Guard empty lines too: bare parts[0] raised IndexError on them.
    if not parts or parts[0] != "src-git":
        return
    feeds.append(parts)
    # Feed URLs may carry a branch suffix: "<url>[;<branch>]".
    url = parts[2].strip().split(';')
    branch = url[1] if len(url) > 1 else 'master'
    feedbranches[url[0]] = branch
    c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
192
# Ask the SDK makefile for the pinned base feed (the val.BASE_FEED target
# prints it), then register the remaining feeds from feeds.conf.default.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    # NOTE(review): this line is bytes (subprocess pipe) while the file loop
    # below yields str — verify parse_feed_entry copes with both.
    parse_feed_entry(line)

with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parse_feed_entry(line)
203
204
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []
# Rebuild every architecture whenever a polled feed repository changes on the
# branch we actually track for it (see feedbranches above).
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger: runs the 00_force_build builder, whose per-arch Trigger
# steps then fan out to the selected architecture(s).
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))
260
261 ####### BUILDERS
262
263 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
264 # what steps, and which workers can execute them. Note that any particular build will
265 # only take place on one slave.
266
def GetDirectorySuffix(props):
    """Map the SDK's release version to an upload-path suffix.

    Release versions like "21.02.3" or "21.02-SNAPSHOT" yield "-21.02";
    anything unparseable (or a missing property) yields "".
    """
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if not props.hasProperty("release_version"):
        return ""
    match = verpat.match(props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
274
def GetNumJobs(props):
    """Compute the make -j level for the current worker.

    Divides the worker's CPU count by the number of builds that may run on
    it concurrently (its own slot count plus the configured 'other_builds'),
    plus one; falls back to 1 when the properties are missing.

    Fix: use floor division — under Python 3 the old '/' returned a float,
    which only rendered correctly by accident through %d formatting.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
    else:
        return 1
280
def GetCwd(props):
    """Best-effort working directory of the running build: the build
    directory if known, else the worker directory, else "/"."""
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
288
def IsArchitectureSelected(target):
    """Build a doStepIf predicate for the per-arch Trigger steps.

    The returned callable is True unless the force scheduler's "options"
    property names a specific architecture other than *target*; absent or
    malformed options mean "build everything".
    """
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        if type(options) is dict:
            chosen = options.get("architecture", "all")
            if chosen != "all" and chosen != target:
                return False
        return True

    return CheckArchitectureProperty
303
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64 secret key.

    Returns the two-line public key (comment line with "secret key" rewritten
    to "public key", then the base64 payload assembled from the signature
    algorithm, key id and public-key bytes of the secret key), or None when
    the input is not valid base64.

    Fix: decode the b64encode result — under Python 3 it is bytes, and the
    old code emitted a literal "b'...'" into the public-key file. Also catch
    only decoding errors instead of a bare except.
    """
    try:
        seckey = base64.b64decode(seckey)
    except (TypeError, ValueError):
        return None

    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
312
def IsSharedWorkdir(step):
    """doStepIf gate: run only on workers flagged with a shared workdir."""
    return True if step.getProperty("shared_wd") else False
315
c['builders'] = []

# Per-worker lock for download-heavy steps.
# NOTE(review): not referenced by any step in this file — confirm whether it
# is still needed.
dlLock = locks.WorkerLock("slave_dl")

# Every arch builder (and the force builder) may run on any phase-2 worker.
slaveNames = [ ]

for slave in c['workers']:
    slaveNames.append(slave.workername)

# Factory driven by the ForceScheduler; the per-arch loop below appends one
# Trigger step per architecture to it.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = slaveNames,
    factory = force_factory))
332
# One builder per architecture. Each build: fetch/refresh the SDK for the
# arch's reference target, update and install the feeds, compile all
# packages, optionally sign the package indexes on the master, then rsync
# packages, failure logs and (optionally) source archives to the mirrors.
for arch in arches:
    # arch[0] = architecture name; arch[1] = "target/subtarget" used to
    # locate the matching SDK on the download server.
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores (feeds GetNumJobs via the "nproc" property)
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    # Non-persistent workers get a full wipe of stale builds plus a cleanup
    # of the current build dir before every run.
    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    # Fetch the newest SDK archive for this arch's reference target.
    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    # Unpack into sdk_update/, then checksum-sync over the existing sdk/ so
    # unchanged files keep their timestamps (helps ccache and make).
    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # Drop absolute host-tool symlinks that don't point into /bin or
    # /usr/bin — they referenced paths from the SDK build host.
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # Tiny makefile that prints the SDK's VERSION_NUMBER; captured below as
    # the "release_version" property (consumed by GetDirectorySuffix).
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key: the real signing happens on the master, so the
    # worker only needs the public key plus placeholder private key files.
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    # Share one download cache across all builds on this worker.
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    # Use full git clones for feeds so GitPoller revisions resolve.
    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    # Optional SSH-based cloning: install the deploy key and rewrite feed
    # URLs from https:// to ssh://git@.
    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf to use SSH cloning",
            workdir = "build/sdk",
            command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    # Remove the rewritten feeds.conf so the SSH URLs don't leak into later
    # steps (mkfeedsconf regenerates a pinned list from the installed feeds).
    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False
    ))

    # Main compile: keep going on per-package errors (IGNORE_ERRORS) and
    # leave package signing to the master (CONFIG_SIGNED_PACKAGES cleared).
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # Signing round-trip: pack the Packages indexes, upload to the master,
    # sign there with signall.sh, download and unpack the signatures.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # Upload packages: create only this arch's directory on the mirror, then
    # delta-sync its contents (suffix is "-MM.mm" for releases, "" otherwise).
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # Collect and upload per-package failure logs for the web frontend.
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    # Optionally mirror source archives fetched during this build (only
    # files newer than the SDK archive, i.e. downloaded by this run).
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                       WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        alwaysRun = True
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=slaveNames, factory=factory))

    # Per-arch triggerable scheduler plus the matching Trigger step on the
    # force builder (gated on the selected architecture).
    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))
687
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Web UI: only enabled when a bind address/port is configured; optional
# basic-auth credentials grant the "admins" role full control.
if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )
712
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# Opt out of sending anonymous usage data to the buildbot project.
c['buildbotNetUsageData'] = None