phase2: abort if ccache or dl/ setup failed
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import subprocess
import ConfigParser

from buildbot import locks

ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

buildbot_url = ini.get("general", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDSLAVES

# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

slave_port = 9990
persistent = False
other_builds = 0
tree_expire = 0

if ini.has_option("general", "port"):
    slave_port = ini.getint("general", "port")

if ini.has_option("general", "persistent"):
    persistent = ini.getboolean("general", "persistent")

if ini.has_option("general", "other_builds"):
    other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "expire"):
    tree_expire = ini.getint("general", "expire")

c['slaves'] = []
max_builds = dict()

for section in ini.sections():
    if section.startswith("slave "):
        if ini.has_option(section, "name") and ini.has_option(section, "password"):
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds[name] = 1
            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")
            c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))

# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option)
c['slavePortnum'] = slave_port

# coalesce builds
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
c['logHorizon'] = 20

####### CHANGESOURCES

home_dir = os.path.abspath(ini.get("general", "homedir"))

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "lede-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
    gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
    gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
    gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
    gpg_passfile = ini.get("gpg", "passfile")
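
# The values above come from config.ini next to this master.cfg. A minimal
# sketch of the expected layout (section and key names are taken from the
# ini.get()/ini.has_option() calls in this file; the values shown are
# placeholders only):
#
#   [general]
#   title = Example builds
#   title_url = http://example.org/
#   buildbot_url = http://buildbot.example.org/
#   homedir = /home/buildbot/phase2
#   port = 9990
#
#   [rsync]
#   binary_url = user@example.org::packages
#   binary_password = secret
#   sdk_url = user@example.org::sdk
#   sdk_password = secret
#   sdk_pattern = lede-sdk-*.tar.xz
#
#   [gpg]
#   keyid = 0x0123456789ABCDEF
#   comment = Unattended build signature
#
#   [status]
#   bind = 8010
#   user = admin
#   password = secret
#
#   [slave 1]
#   name = slave-1
#   password = secret
#   builds = 1
#
# Optional keys handled above include general/persistent, general/other_builds,
# general/expire, rsync/source_url, rsync/source_password, gpg/home and
# gpg/passfile.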


# find arches
arches = [ ]
archnames = [ ]

findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = home_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.strip().split()
    arches.append(at)
    archnames.append(at[0])
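
# Each line printed by dumpinfo.pl is expected to look roughly like
# "<pkgarch> <target>/<subtarget> ..." (e.g. "mips_24kc ar71xx/generic",
# a hypothetical example): arch[0] is the package architecture used for the
# builder name and bin/packages/ paths, and arch[1] is the target/subtarget
# used to locate the SDK archive below.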


# find feeds
feeds = []
feedbranches = dict()

from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []

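# Feed entries are lines of the form "src-git <name> <url>[;<branch>]"; each
# one gets a GitPoller so that pushes to the feed repository trigger builds on
# the matching branch (defaulting to master when no branch is given).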
def parse_feed_entry(line):
    parts = line.strip().split()
    if parts[0] == "src-git":
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

make = subprocess.Popen(['make', '--no-print-directory', '-C', home_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=home_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(line)

with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parse_feed_entry(line)


####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

def branch_change_filter(change):
    return change.branch == feedbranches[change.repository]

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter
c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
    treeStableTimer=60,
    builderNames=archnames))

c['schedulers'].append(ForceScheduler(
    name="force",
    builderNames=archnames))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which slaves can execute them. Note that any particular build will
# only take place on one slave.

from buildbot.process.factory import BuildFactory
from buildbot.steps.source import Git
from buildbot.steps.shell import ShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import FileDownload
from buildbot.steps.master import MasterShellCommand
from buildbot.process.properties import WithProperties


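# Slaves named like "<name>-<major>.<minor>-<suffix>" upload into a versioned
# "packages-<major>.<minor>/" tree on the rsync target; any other slave name
# yields no suffix and uploads into plain "packages/".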
def GetDirectorySuffix(props):
    if props.hasProperty("slavename") and re.match("^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
        return "-%s" % props["slavename"].split('-')[1]
    else:
        return ""

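# Divide the detected CPU count by the number of builds that may run on this
# slave at once (its own max_builds plus any configured other_builds) so that
# parallel make jobs do not oversubscribe the machine.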
def GetNumJobs(props):
    if props.hasProperty("slavename") and props.hasProperty("nproc"):
        return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
    else:
        return 1

def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"


c['builders'] = []

dlLock = locks.SlaveLock("slave_dl")

slaveNames = [ ]

for slave in c['slaves']:
    slaveNames.append(slave.slavename)

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = home_dir+"/expire.sh",
            slavedest = "../expire.sh",
            mode = 0755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
    factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))

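    # The dl/ symlink and ccache setup below must succeed; both steps halt the
    # build on failure so packages are never built without a shared download
    # directory or a working ccache.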
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = home_dir+'/ccache.sh',
        slavedest = 'sdk/ccache.sh',
        mode = 0755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"]))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"]))

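    # -j is computed per slave by GetNumJobs above; BUILD_LOG=1 keeps
    # per-package logs under logs/ for the failure-log steps below,
    # CONFIG_SIGNED_PACKAGES=y signs the package indexes with the key-build
    # key installed earlier, IGNORE_ERRORS lets the build carry on past
    # individual package failures, and CONFIG_AUTOREMOVE=y cleans up build
    # directories as packages are produced to save disk space.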
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

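    # Package index signing happens on the master: the slave tars up the
    # Packages indexes, uploads the tarball, the master signs it with
    # signall.sh and the configured GPG key, and the signed tarball is
    # downloaded and unpacked back over bin/packages/.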
    if gpg_keyid is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(home_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            slavesrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
            env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
            slavedest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

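    # The include/exclude combination below makes rsync create just the
    # bin/packages/<arch>/ directory on the remote side without transferring
    # any package files yet; the packageupload step then syncs the contents.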
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

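    # error.txt lists the packages that failed to build; extract their names,
    # collect the matching logs from logs/package/feeds/ into faillogs/ and
    # upload them next to the packages.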
    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "-4", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    from buildbot.config import BuilderConfig

    c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))


####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

c['status'] = []

from buildbot.status import html
from buildbot.status.web import authz, auth

if ini.has_option("status", "bind"):
    if ini.has_option("status", "user") and ini.has_option("status", "password"):
        authz_cfg=authz.Authz(
            # change any of these to True to enable; see the manual for more
            # options
            auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
            gracefulShutdown = 'auth',
            forceBuild = 'auth', # use this to test your slave once it is set up
            forceAllBuilds = 'auth',
            pingBuilder = False,
            stopBuild = 'auth',
            stopAllBuilds = 'auth',
            cancelPendingBuild = 'auth',
        )
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
    else:
        c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}