2 # ex: set syntax=python:
9 from buildbot import locks
11 ini = ConfigParser.ConfigParser()
12 ini.read("./config.ini")
14 buildbot_url = ini.get("general", "buildbot_url")
16 # This is a sample buildmaster config file. It must be installed as
17 # 'master.cfg' in your buildmaster's base directory.
19 # This is the dictionary that the buildmaster pays attention to. We also use
20 # a shorter alias to save typing.
21 c = BuildmasterConfig = {}
25 # The 'slaves' list defines the set of recognized buildslaves. Each element is
26 # a BuildSlave object, specifying a unique slave name and password. The same
27 # slave name and password must be configured on the slave.
28 from buildbot.buildslave import BuildSlave
35 if ini.has_option("general", "port"):
36 slave_port = ini.getint("general", "port")
38 if ini.has_option("general", "persistent"):
39 persistent = ini.getboolean("general", "persistent")
41 if ini.has_option("general", "other_builds"):
42 other_builds = ini.getint("general", "other_builds")
44 if ini.has_option("general", "expire"):
45 tree_expire = ini.getint("general", "expire")
50 for section in ini.sections():
51 if section.startswith("slave "):
52 if ini.has_option(section, "name") and ini.has_option(section, "password"):
53 name = ini.get(section, "name")
54 password = ini.get(section, "password")
56 if ini.has_option(section, "builds"):
57 max_builds[name] = ini.getint(section, "builds")
58 c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))
60 # 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option).
63 c['slavePortnum'] = slave_port
66 c['mergeRequests'] = True
68 # Reduce amount of backlog data
69 c['buildHorizon'] = 30
74 home_dir = os.path.abspath(ini.get("general", "homedir"))
76 rsync_bin_url = ini.get("rsync", "binary_url")
77 rsync_bin_key = ini.get("rsync", "binary_password")
82 if ini.has_option("rsync", "source_url"):
83 rsync_src_url = ini.get("rsync", "source_url")
84 rsync_src_key = ini.get("rsync", "source_password")
88 rsync_sdk_pat = "lede-sdk-*.tar.xz"
90 if ini.has_option("rsync", "sdk_url"):
91 rsync_sdk_url = ini.get("rsync", "sdk_url")
93 if ini.has_option("rsync", "sdk_password"):
94 rsync_sdk_key = ini.get("rsync", "sdk_password")
96 if ini.has_option("rsync", "sdk_pattern"):
97 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
101 gpg_comment = "Unattended build signature"
102 gpg_passfile = "/dev/null"
104 if ini.has_option("gpg", "home"):
105 gpg_home = ini.get("gpg", "home")
107 if ini.has_option("gpg", "keyid"):
108 gpg_keyid = ini.get("gpg", "keyid")
110 if ini.has_option("gpg", "comment"):
111 gpg_comment = ini.get("gpg", "comment")
113 if ini.has_option("gpg", "passfile"):
114 gpg_passfile = ini.get("gpg", "passfile")
121 findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'],
122 stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
125 line = findarches.stdout.readline()
128 at = line.strip().split()
130 archnames.append(at[0])
135 feedbranches = dict()
137 from buildbot.changes.gitpoller import GitPoller
138 c['change_source'] = []
140 with open(home_dir+'/source.git/feeds.conf.default', 'r') as f:
142 parts = line.strip().split()
143 if parts[0] == "src-git":
145 url = parts[2].strip().split(';')
146 branch = url[1] if len(url) > 1 else 'master'
147 feedbranches[url[0]] = branch
148 c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
153 # Configure the Schedulers, which decide how to react to incoming changes. In this
154 # case, just kick off a 'basebuild' build
def branch_change_filter(change):
    """Scheduler change filter: accept a change only if it is on the branch
    we poll for its repository.

    `feedbranches` maps repository URL -> tracked branch (built from
    feeds.conf.default above).  A change can arrive for a repository that is
    not in that map; using dict.get() instead of indexing means such a
    change is quietly rejected rather than killing the scheduler with a
    KeyError.
    """
    return feedbranches.get(change.repository) == change.branch
159 from buildbot.schedulers.basic import SingleBranchScheduler
160 from buildbot.schedulers.forcesched import ForceScheduler
161 from buildbot.changes import filter
163 c['schedulers'].append(SingleBranchScheduler(
165 change_filter=filter.ChangeFilter(filter_fn=branch_change_filter),
167 builderNames=archnames))
169 c['schedulers'].append(ForceScheduler(
171 builderNames=archnames))
175 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
176 # what steps, and which slaves can execute them. Note that any particular build will
177 # only take place on one slave.
179 from buildbot.process.factory import BuildFactory
180 from buildbot.steps.source import Git
181 from buildbot.steps.shell import ShellCommand
182 from buildbot.steps.shell import SetProperty
183 from buildbot.steps.transfer import FileUpload
184 from buildbot.steps.transfer import FileDownload
185 from buildbot.steps.master import MasterShellCommand
186 from buildbot.process.properties import WithProperties
189 def GetDirectorySuffix(props):
190 if props.hasProperty("slavename") and re.match("^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
191 return "-%s" % props["slavename"].split('-')[1]
195 def GetNumJobs(props):
196 if props.hasProperty("slavename") and props.hasProperty("nproc"):
197 return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
204 dlLock = locks.SlaveLock("slave_dl")
208 for slave in c['slaves']:
209 slaveNames.append(slave.slavename)
212 ts = arch[1].split('/')
214 factory = BuildFactory()
216 # find number of cores
217 factory.addStep(SetProperty(
220 description = "Finding number of CPUs",
221 command = ["nproc"]))
224 factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755))
227 factory.addStep(ShellCommand(
229 description = "Cleaning previous builds",
230 command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"],
231 haltOnFailure = True,
234 factory.addStep(ShellCommand(
236 description = "Cleaning work area",
237 command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"],
238 haltOnFailure = True,
241 # expire tree if needed
242 elif tree_expire > 0:
243 factory.addStep(FileDownload(
244 mastersrc = home_dir+"/expire.sh",
245 slavedest = "../expire.sh",
248 factory.addStep(ShellCommand(
250 description = "Checking for build tree expiry",
251 command = ["./expire.sh", str(tree_expire)],
253 haltOnFailure = True,
256 factory.addStep(ShellCommand(
258 description = "Preparing SDK directory",
259 command = ["mkdir", "-p", "sdk"],
260 haltOnFailure = True))
262 factory.addStep(ShellCommand(
263 name = "downloadsdk",
264 description = "Downloading SDK archive",
265 command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
266 env={'RSYNC_PASSWORD': rsync_sdk_key},
267 haltOnFailure = True,
270 factory.addStep(ShellCommand(
272 description = "Unpacking SDK archive",
273 command = ["tar", "--keep-newer-files", "--no-overwrite-dir", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"],
274 haltOnFailure = True))
276 factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600))
277 factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600))
279 factory.addStep(ShellCommand(
281 description = "Preparing download directory",
282 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"]))
284 factory.addStep(ShellCommand(
286 description = "Preparing SDK configuration",
287 workdir = "build/sdk",
288 command = ["sh", "-c", "rm -f .config && make defconfig"]))
290 factory.addStep(ShellCommand(
291 name = "updatefeeds",
292 description = "Updating feeds",
293 workdir = "build/sdk",
294 command = ["./scripts/feeds", "update"]))
296 factory.addStep(ShellCommand(
297 name = "installfeeds",
298 description = "Installing feeds",
299 workdir = "build/sdk",
300 command = ["./scripts/feeds", "install", "-a"]))
302 factory.addStep(ShellCommand(
304 description = "Building packages",
305 workdir = "build/sdk",
306 command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"]))
308 factory.addStep(ShellCommand(
309 name = "mkfeedsconf",
310 description = "Generating pinned feeds.conf",
311 workdir = "build/sdk",
312 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
314 if gpg_keyid is not None:
315 factory.addStep(MasterShellCommand(
316 name = "signprepare",
317 description = "Preparing temporary signing directory",
318 command = ["mkdir", "-p", "%s/signing" %(home_dir)],
322 factory.addStep(ShellCommand(
324 description = "Packing files to sign",
325 workdir = "build/sdk",
326 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
330 factory.addStep(FileUpload(
331 slavesrc = "sdk/sign.tar.gz",
332 masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
336 factory.addStep(MasterShellCommand(
338 description = "Signing files",
339 command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment],
340 env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
344 factory.addStep(FileDownload(
345 mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]),
346 slavedest = "sdk/sign.tar.gz",
350 factory.addStep(ShellCommand(
352 description = "Unpacking signed files",
353 workdir = "build/sdk",
354 command = ["tar", "-xzf", "sign.tar.gz"],
358 factory.addStep(ShellCommand(
359 name = "uploadprepare",
360 description = "Preparing package directory",
361 workdir = "build/sdk",
362 command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
363 env={'RSYNC_PASSWORD': rsync_bin_key},
364 haltOnFailure = True,
368 factory.addStep(ShellCommand(
369 name = "packageupload",
370 description = "Uploading package files",
371 workdir = "build/sdk",
372 command = ["rsync", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
373 env={'RSYNC_PASSWORD': rsync_bin_key},
374 haltOnFailure = True,
378 factory.addStep(ShellCommand(
380 description = "Preparing log directory",
381 workdir = "build/sdk",
382 command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)],
383 env={'RSYNC_PASSWORD': rsync_bin_key},
384 haltOnFailure = True,
388 factory.addStep(ShellCommand(
390 description = "Finding failure logs",
391 workdir = "build/sdk/logs/package/feeds",
392 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
393 haltOnFailure = False
396 factory.addStep(ShellCommand(
398 description = "Collecting failure logs",
399 workdir = "build/sdk",
400 command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
401 haltOnFailure = False
404 factory.addStep(ShellCommand(
406 description = "Uploading failure logs",
407 workdir = "build/sdk",
408 command = ["rsync", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])],
409 env={'RSYNC_PASSWORD': rsync_bin_key},
410 haltOnFailure = False,
414 if rsync_src_url is not None:
415 factory.addStep(ShellCommand(
416 name = "sourceupload",
417 description = "Uploading source archives",
418 workdir = "build/sdk",
419 command = ["rsync", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
420 env={'RSYNC_PASSWORD': rsync_src_key},
421 haltOnFailure = False,
425 from buildbot.config import BuilderConfig
427 c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory))
####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.
438 from buildbot.status import html
439 from buildbot.status.web import authz, auth
441 if ini.has_option("status", "bind"):
442 if ini.has_option("status", "user") and ini.has_option("status", "password"):
443 authz_cfg=authz.Authz(
444 # change any of these to True to enable; see the manual for more
446 auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
447 gracefulShutdown = 'auth',
448 forceBuild = 'auth', # use this to test your slave once it is set up
449 forceAllBuilds = 'auth',
452 stopAllBuilds = 'auth',
453 cancelPendingBuild = 'auth',
455 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
457 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
459 ####### PROJECT IDENTITY
461 # the 'title' string will appear at the top of this buildbot
462 # installation's html.WebStatus home page (linked to the
463 # 'titleURL') and is embedded in the title of the waterfall HTML page.
465 c['title'] = ini.get("general", "title")
466 c['titleURL'] = ini.get("general", "title_url")
468 # the 'buildbotURL' string should point to the location where the buildbot's
469 # internal web server (usually the html.WebStatus page) is visible. This
470 # typically uses the port number set in the Waterfall 'status' entry, but
471 # with an externally-visible host name which the buildbot cannot figure out
474 c['buildbotURL'] = buildbot_url
479 # This specifies what database buildbot uses to store its state. You can leave
480 # this at its default for all but the largest installations.
481 'db_url' : "sqlite:///state.sqlite",