phase1: remove GetCwd()
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record this master's PID once, so external tooling can locate the process.
if not os.path.exists("twistd.pid"):
	with open("twistd.pid", "w") as pidfile:
		pidfile.write(str(os.getpid()))
47
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
# Read the master configuration; the location can be overridden through
# the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Bail out early on an obviously incomplete configuration.
if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
	raise ValueError("Fix your configuration")

inip1 = ini['phase1']

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = inip1.get("buildbot_url")
79
80 ####### BUILDWORKERS
81
82 # The 'workers' list defines the set of recognized buildworkers. Each element is
83 # a Worker object, specifying a unique worker name and password. The same
84 # worker name and password must be configured on the worker.
85
c['workers'] = []
NetLocks = dict()

# Register every "worker <name>" section from the config that belongs to
# phase 1 (either no "phase" option, or phase = 1). Per-worker tuning is
# forwarded as worker properties so build steps can consult it later.
for section in ini.sections():
	if section.startswith("worker "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
			sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
				sl_props['max_builds'] = max_builds
				if max_builds == 1:
					sl_props['shared_wd'] = True
			if ini.has_option(section, "cleanup"):
				sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
			if ini.has_option(section, "dl_lock"):
				lockname = ini.get(section, "dl_lock")
				sl_props['dl_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "ul_lock"):
				# FIX: this previously read the "dl_lock" option, silently
				# tying a configured upload lock to the download lock name.
				lockname = ini.get(section, "ul_lock")
				sl_props['ul_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "shared_wd"):
				shared_wd = ini.getboolean(section, "shared_wd")
				sl_props['shared_wd'] = shared_wd
				if shared_wd and (max_builds != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')
			c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}

# coalesce builds
# Collapse queued requests for the same builder into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# JanitorConfigurator prunes old log data (3-day horizon) daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
	logHorizon=timedelta(days=3),
	hour=6,
)]
133
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
	"""Returns the complete_at of the latest completed and not SKIPPED
	build request for this builder, or None if there are no such build
	requests. We need to filter out SKIPPED requests because we're
	using collapseRequests=True which is unfortunately marking all
	previous requests as complete when new buildset is created.

	@returns: datetime instance or None, via Deferred
	"""

	bldrid = yield bldr.getBuilderId()
	# Newest completed, non-skipped build request for this builder.
	completed = yield bldr.master.data.get(
		('builders', bldrid, 'buildrequests'),
		[
			resultspec.Filter('complete', 'eq', [True]),
			resultspec.Filter('results', 'ne', [results.SKIPPED]),
		],
		order=['-complete_at'], limit=1)
	if not completed:
		return

	complete_at = completed[0]['complete_at']

	# Also consult the most recently *started* build: if it completed later
	# than the newest request, prefer that completion time.
	last_build = yield bldr.master.data.get(
		('builds', ),
		[
			resultspec.Filter('builderid', 'eq', [bldrid]),
		],
		order=['-started_at'], limit=1)

	if last_build and last_build[0]:
		last_complete_at = last_build[0]['complete_at']
		if last_complete_at and (last_complete_at > complete_at):
			return last_complete_at

	return complete_at
171
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
	"""Order builders so the one idle longest gets to build first.

	Builders are sorted by the completion time of their newest finished,
	non-skipped build (see getNewestCompleteTime); builders that never
	completed a build sort first, busy builders sort last.

	@returns: list of sorted builders
	"""

	def _currently_building(bldr):
		return bool(bldr.building) or bool(bldr.old_building)

	def _annotate(bldr):
		d = defer.maybeDeferred(getNewestCompleteTime, bldr)
		d.addCallback(lambda complete_at: (complete_at, bldr))
		return d

	def _sort_key(item):
		(complete_at, bldr) = item

		if _currently_building(bldr):
			complete_at = datetime.max.replace(tzinfo=tzutc())
		elif not complete_at:
			complete_at = datetime.min.replace(tzinfo=tzutc())

		# Builder name breaks ties deterministically.
		return (complete_at, bldr.name)

	infos = yield defer.gatherResults([_annotate(bldr) for bldr in builders])
	infos.sort(key=_sort_key)

	for complete_at, bldr in infos:
		log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

	return [bldr for (complete_at, bldr) in infos]

c['prioritizeBuilders'] = prioritizeBuilders
210
211 ####### CHANGESOURCES
212
# Master-side workspace and location of the helper scripts checkout.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

# Days before a worker's build tree is expired (0 disables expiry), and an
# optional .config seed prepended to every build's configuration.
tree_expire = inip1.getint("expire", 0)
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

# rsync destination for built binaries; daemon-style URLs ("::" or
# rsync://) additionally get a connection timeout.
rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
	rsync_bin_defopts += ["--contimeout=20"]

# rsync destination for source archives, with the same daemon handling.
rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
	rsync_src_defopts += ["--contimeout=20"]

# Optional usign package-signing key and comment ("usign" config section).
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
	usign_key = ini['usign'].get("key")
	usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)
244
245
# find targets
targets = []

# Clone the source tree on first run, otherwise just update it.
if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)

# Ask the tree which targets exist; the first whitespace-separated field of
# each output line is the "<target>/<subtarget>" name.
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

for line in findtargets.stdout:
	targets.append(line.decode().strip().split(' ')[0])

# Reap the child so it does not linger as a zombie process.
findtargets.wait()
264
265
266 # the 'change_source' setting tells the buildmaster how it should find out
267 # about source code changes. Here we point to the buildbot clone of pyflakes.
268
c['change_source'] = []
# Poll the main repository every 5 minutes for new commits on repo_branch.
c['change_source'].append(GitPoller(
	repo_url,
	workdir=work_dir+'/work.git', branch=repo_branch,
	pollinterval=300))
274
275 ####### SCHEDULERS
276
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
279
class TagChoiceParameter(BaseParameter):
	"""Force-scheduler parameter offering the remote release tags that
	match the current release branch as selectable choices."""

	spec_attributes = ["strict", "choices"]
	type = "list"
	strict = True

	def __init__(self, name, label=None, **kw):
		super().__init__(name, label, **kw)
		# Most recently computed tag list; also consulted by parse_from_arg().
		self._choice_list = []

	@property
	def choices(self):
		"""Compute the selectable tags by listing remote tags whose version
		starts with the branch's base version (e.g. "21.02")."""
		taglist = []
		basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

		if basever:
			# Query the tags directly from the remote repository.
			findtags = subprocess.Popen(
				['git', 'ls-remote', '--tags', repo_url],
				stdout = subprocess.PIPE)

			while True:
				line = findtags.stdout.readline()

				if not line:
					break

				tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

				if tagver and tagver[1].find(basever[1]) == 0:
					taglist.append(tagver[1])

			# Suffixing non-rc tags with '-z' makes a final release sort
			# before its release candidates in the reversed ordering.
			taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
			# Empty first entry means "no tag selected".
			taglist.insert(0, '')

			self._choice_list = taglist

		return self._choice_list

	def parse_from_arg(self, s):
		# Reject values not present in the most recently computed choices.
		if self.strict and s not in self._choice_list:
			raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
		return s
321
c['schedulers'] = []
# Build every target automatically once a change on repo_branch has been
# stable for 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
	name = "all",
	change_filter = filter.ChangeFilter(branch=repo_branch),
	treeStableTimer = 60,
	builderNames = targets))

# Manual trigger via the web UI; target and optional release tag are
# collected into the nested "options" property.
c['schedulers'].append(ForceScheduler(
	name = "force",
	buttonName = "Force builds",
	label = "Force build details",
	builderNames = [ "00_force_build" ],

	codebases = [
		util.CodebaseParameter(
			"",
			label = "Repository",
			branch = util.FixedParameter(name = "branch", default = ""),
			revision = util.FixedParameter(name = "revision", default = ""),
			repository = util.FixedParameter(name = "repository", default = ""),
			project = util.FixedParameter(name = "project", default = "")
		)
	],

	reason = util.StringParameter(
		name = "reason",
		label = "Reason",
		default = "Trigger build",
		required = True,
		size = 80
	),

	properties = [
		util.NestedParameter(
			name="options",
			label="Build Options",
			layout="vertical",
			fields=[
				util.ChoiceStringParameter(
					name = "target",
					label = "Build target",
					default = "all",
					choices = [ "all" ] + targets
				),
				TagChoiceParameter(
					name = "tag",
					label = "Build tag",
					default = ""
				)
			]
		)
	]
))
375
376 ####### BUILDERS
377
378 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
379 # what steps, and which workers can execute them. Note that any particular build will
380 # only take place on one worker.
381
# Mapping of user-requestable "clean" property values to make targets;
# matched by prefix regex in IsMakeCleanRequested().
CleanTargetMap = [
	[ "tools", "tools/clean" ],
	[ "chain", "toolchain/clean" ],
	[ "linux", "target/linux/clean" ],
	[ "dir", "dirclean" ],
	[ "dist", "distclean" ]
]
389
def IsMakeCleanRequested(pattern):
	"""Build a doStepIf predicate that is true when the user-supplied
	"clean" property matches *pattern*."""
	def check_clean_property(step):
		requested = step.getProperty("clean")
		return bool(requested and re.match(pattern, requested))

	return check_clean_property
399
def IsSharedWorkdir(step):
	"""doStepIf helper: whether this worker uses a shared work directory."""
	shared = step.getProperty("shared_wd")
	return bool(shared)
402
def IsCleanupRequested(step):
	"""doStepIf helper: run the cleanup steps when the worker asked for
	cleanup; never on a shared work directory."""
	if IsSharedWorkdir(step):
		return False
	return bool(step.getProperty("do_cleanup"))
411
def IsExpireRequested(step):
	"""doStepIf helper: expire the build tree only when the workdir is not
	shared and no cleanup is going to run instead."""
	return not (IsSharedWorkdir(step) or IsCleanupRequested(step))
417
def IsGitFreshRequested(step):
	"""doStepIf helper: use Git "fresh" mode when cleanup was requested."""
	return bool(step.getProperty("do_cleanup"))
424
def IsGitCleanRequested(step):
	# doStepIf helper: default Git "clean" mode, complement of "fresh".
	return not IsGitFreshRequested(step)
427
def IsTaggingRequested(step):
	"""doStepIf helper: true when the forced "tag" property looks like a
	release tag (x.y.z with optional -rcN suffix)."""
	tag = step.getProperty("tag")
	return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))
434
def IsNoTaggingRequested(step):
	# doStepIf helper: complement of IsTaggingRequested.
	return not IsTaggingRequested(step)
437
def IsNoMasterBuild(step):
	# doStepIf helper: true when building a release branch, not master.
	return repo_branch != "master"
440
def GetBaseVersion():
	"""Return the numeric version part ("NN.NN") of a release branch name
	such as "openwrt-21.02", or "master" for any other branch."""
	if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
		return repo_branch.split('-')[1]
	return "master"
446
@properties.renderer
def GetVersionPrefix(props):
	"""Renderer: upload directory prefix — "<tag>/" for tagged release
	builds, "<base>-SNAPSHOT/" on release branches, "" on master."""
	basever = GetBaseVersion()
	if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
		return "%s/" % props["tag"]
	if basever != "master":
		return "%s-SNAPSHOT/" % basever
	return ""
456
@properties.renderer
def GetNumJobs(props):
	"""Renderer: parallel make jobs — worker cores split evenly among its
	concurrent builds, falling back to 1 when undetermined."""
	if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
		return "1"
	return str(int(int(props["nproc"]) / props["max_builds"]))
463
@properties.renderer
def GetCC(props):
	"""Renderer: the C compiler command detected on the worker, or "gcc"."""
	return props["cc_command"] if props.hasProperty("cc_command") else "gcc"
470
@properties.renderer
def GetCXX(props):
	"""Renderer: the C++ compiler command detected on the worker, or "g++"."""
	return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"
477
@properties.renderer
def GetCCache(props):
	"""Renderer: the detected ccache command, or "" when unavailable."""
	if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
		return props["ccache_command"]
	return ""
484
def GetNextBuild(builder, requests):
	"""nextBuild hook: prefer any request that carries a "tag" property
	(forced release builds), otherwise take the oldest pending request."""
	for request in requests:
		if request.properties and request.properties.hasProperty("tag"):
			return request

	request = requests[0]
	log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, request.id, request.bsid))
	return request
493
def MakeEnv(overrides=None, tryccache=False):
	"""Assemble the environment dict for build steps.

	CCC/CCXX always carry the detected host compilers. CC/CXX either point
	at the ccache wrapper scripts (tryccache=True) or at the compilers
	directly; entries from *overrides* win over everything else.
	"""
	env = {
		'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
		'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
	}
	if tryccache:
		env.update({
			'CC': Interpolate("%(prop:builddir)s/ccache_cc.sh"),
			'CXX': Interpolate("%(prop:builddir)s/ccache_cxx.sh"),
			'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
		})
	else:
		env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
	if overrides is not None:
		env.update(overrides)
	return env
510
@properties.renderer
def NetLockDl(props):
	"""Renderer: exclusive access to this worker's download lock, if any."""
	if props.hasProperty("dl_lock"):
		lock = NetLocks[props["dl_lock"]]
		return [lock.access('exclusive')]
	return []
520
@properties.renderer
def NetLockUl(props):
	"""Renderer: exclusive access to this worker's upload lock, if any."""
	if props.hasProperty("ul_lock"):
		lock = NetLocks[props["ul_lock"]]
		return [lock.access('exclusive')]
	return []
530
@util.renderer
def TagPropertyValue(props):
	"""Renderer: the "tag" entry of the force-build "options" dict, else None."""
	if not props.hasProperty("options"):
		return None
	options = props.getProperty("options")
	if type(options) is dict:
		return options.get("tag")
	return None
538
def IsTargetSelected(target):
	"""Build a doStepIf predicate that is true unless the force-build
	"options" select a different specific target."""
	def check_target_property(step):
		try:
			options = step.getProperty("options")
			if type(options) is dict:
				chosen = options.get("target", "all")
				if chosen not in ("all", target):
					return False
		except KeyError:
			pass

		return True

	return check_target_property
553
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public key (as text) from a base64-encoded usign
	secret key.

	The public portion is assembled from fixed offsets (0:2, 32:40, 72:)
	of the decoded secret key, per the usign key layout. Returns None when
	*seckey* is not valid base64.
	"""
	try:
		decoded = base64.b64decode(seckey)
	except Exception:
		return None

	public = decoded[0:2] + decoded[32:40] + decoded[72:]
	# FIX: b64encode() returns bytes; decode to str so the generated key
	# file does not literally contain a "b'...'" repr.
	return "{}\n{}".format(
		re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(public).decode())
562
563
c['builders'] = []

# Master-side lock used to serialize download traffic per worker.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
	workerNames.append(worker.workername)

force_factory = BuildFactory()

# Placeholder builder targeted by the ForceScheduler; the real work runs
# on the per-target builders.
c['builders'].append(BuilderConfig(
	name = "00_force_build",
	workernames = workerNames,
	factory = force_factory))
579
580 for target in targets:
581 ts = target.split('/')
582
583 factory = BuildFactory()
584
585 # setup shared work directory if required
586 factory.addStep(ShellCommand(
587 name = "sharedwd",
588 description = "Setting up shared work directory",
589 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
590 workdir = ".",
591 haltOnFailure = True,
592 doStepIf = IsSharedWorkdir))
593
594 # find number of cores
595 factory.addStep(SetPropertyFromCommand(
596 name = "nproc",
597 property = "nproc",
598 description = "Finding number of CPUs",
599 command = ["nproc"]))
600
601 # find gcc and g++ compilers
602 factory.addStep(FileDownload(
603 name = "dlfindbinpl",
604 mastersrc = scripts_dir + '/findbin.pl',
605 workerdest = "../findbin.pl",
606 mode = 0o755))
607
608 factory.addStep(SetPropertyFromCommand(
609 name = "gcc",
610 property = "cc_command",
611 description = "Finding gcc command",
612 command = [
613 "../findbin.pl", "gcc", "", "",
614 ],
615 haltOnFailure = True))
616
617 factory.addStep(SetPropertyFromCommand(
618 name = "g++",
619 property = "cxx_command",
620 description = "Finding g++ command",
621 command = [
622 "../findbin.pl", "g++", "", "",
623 ],
624 haltOnFailure = True))
625
626 # see if ccache is available
627 factory.addStep(SetPropertyFromCommand(
628 property = "ccache_command",
629 command = ["which", "ccache"],
630 description = "Testing for ccache command",
631 haltOnFailure = False,
632 flunkOnFailure = False,
633 warnOnFailure = False,
634 ))
635
636 # expire tree if needed
637 if tree_expire > 0:
638 factory.addStep(FileDownload(
639 name = "dlexpiresh",
640 doStepIf = IsExpireRequested,
641 mastersrc = scripts_dir + '/expire.sh',
642 workerdest = "../expire.sh",
643 mode = 0o755))
644
645 factory.addStep(ShellCommand(
646 name = "expire",
647 description = "Checking for build tree expiry",
648 command = ["./expire.sh", str(tree_expire)],
649 workdir = ".",
650 haltOnFailure = True,
651 doStepIf = IsExpireRequested,
652 timeout = 2400))
653
654 # cleanup.sh if needed
655 factory.addStep(FileDownload(
656 name = "dlcleanupsh",
657 mastersrc = scripts_dir + '/cleanup.sh',
658 workerdest = "../cleanup.sh",
659 mode = 0o755,
660 doStepIf = IsCleanupRequested))
661
662 factory.addStep(ShellCommand(
663 name = "cleanold",
664 description = "Cleaning previous builds",
665 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
666 workdir = ".",
667 haltOnFailure = True,
668 doStepIf = IsCleanupRequested,
669 timeout = 2400))
670
671 factory.addStep(ShellCommand(
672 name = "cleanup",
673 description = "Cleaning work area",
674 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
675 workdir = ".",
676 haltOnFailure = True,
677 doStepIf = IsCleanupRequested,
678 timeout = 2400))
679
680 # user-requested clean targets
681 for tuple in CleanTargetMap:
682 factory.addStep(ShellCommand(
683 name = tuple[1],
684 description = 'User-requested "make %s"' % tuple[1],
685 command = ["make", tuple[1], "V=s"],
686 env = MakeEnv(),
687 doStepIf = IsMakeCleanRequested(tuple[0])
688 ))
689
690 # Workaround bug when switching from a checked out tag back to a branch
691 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
692 factory.addStep(ShellCommand(
693 name = "gitcheckout",
694 description = "Ensure that Git HEAD is sane",
695 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
696 haltOnFailure = True))
697
698 # check out the source
699 # Git() runs:
700 # if repo doesn't exist: 'git clone repourl'
701 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
702 # 'git fetch -t repourl branch; git reset --hard revision'
703 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
704 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
705 factory.addStep(Git(
706 name = "gitclean",
707 repourl = repo_url,
708 branch = repo_branch,
709 mode = 'full',
710 method = 'clean',
711 locks = NetLockDl,
712 haltOnFailure = True,
713 doStepIf = IsGitCleanRequested,
714 ))
715
716 factory.addStep(Git(
717 name = "gitfresh",
718 repourl = repo_url,
719 branch = repo_branch,
720 mode = 'full',
721 method = 'fresh',
722 locks = NetLockDl,
723 haltOnFailure = True,
724 doStepIf = IsGitFreshRequested,
725 ))
726
727 # update remote refs
728 factory.addStep(ShellCommand(
729 name = "fetchrefs",
730 description = "Fetching Git remote refs",
731 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
732 haltOnFailure = True
733 ))
734
735 # switch to tag
736 factory.addStep(ShellCommand(
737 name = "switchtag",
738 description = "Checking out Git tag",
739 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
740 haltOnFailure = True,
741 doStepIf = IsTaggingRequested
742 ))
743
744 # Verify that Git HEAD points to a tag or branch
745 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
746 factory.addStep(ShellCommand(
747 name = "gitverify",
748 description = "Ensure that Git HEAD is pointing to a branch or tag",
749 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
750 haltOnFailure = True))
751
752 factory.addStep(ShellCommand(
753 name = "rmtmp",
754 description = "Remove tmp folder",
755 command=["rm", "-rf", "tmp/"]))
756
757 # feed
758 factory.addStep(ShellCommand(
759 name = "rmfeedlinks",
760 description = "Remove feed symlinks",
761 command=["rm", "-rf", "package/feeds/"]))
762
763 factory.addStep(StringDownload(
764 name = "ccachecc",
765 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
766 workerdest = "../ccache_cc.sh",
767 mode = 0o755,
768 ))
769
770 factory.addStep(StringDownload(
771 name = "ccachecxx",
772 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
773 workerdest = "../ccache_cxx.sh",
774 mode = 0o755,
775 ))
776
777 # feed
778 factory.addStep(ShellCommand(
779 name = "updatefeeds",
780 description = "Updating feeds",
781 command=["./scripts/feeds", "update"],
782 env = MakeEnv(tryccache=True),
783 haltOnFailure = True,
784 locks = NetLockDl,
785 ))
786
787 # feed
788 factory.addStep(ShellCommand(
789 name = "installfeeds",
790 description = "Installing feeds",
791 command=["./scripts/feeds", "install", "-a"],
792 env = MakeEnv(tryccache=True),
793 haltOnFailure = True
794 ))
795
796 # seed config
797 if config_seed is not None:
798 factory.addStep(StringDownload(
799 name = "dlconfigseed",
800 s = config_seed + '\n',
801 workerdest = ".config",
802 mode = 0o644
803 ))
804
805 # configure
806 factory.addStep(ShellCommand(
807 name = "newconfig",
808 description = "Seeding .config",
809 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
810 ))
811
812 factory.addStep(ShellCommand(
813 name = "delbin",
814 description = "Removing output directory",
815 command = ["rm", "-rf", "bin/"]
816 ))
817
818 factory.addStep(ShellCommand(
819 name = "defconfig",
820 description = "Populating .config",
821 command = ["make", "defconfig"],
822 env = MakeEnv()
823 ))
824
825 # check arch
826 factory.addStep(ShellCommand(
827 name = "checkarch",
828 description = "Checking architecture",
829 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
830 logEnviron = False,
831 want_stdout = False,
832 want_stderr = False,
833 haltOnFailure = True
834 ))
835
836 # find libc suffix
837 factory.addStep(SetPropertyFromCommand(
838 name = "libc",
839 property = "libc",
840 description = "Finding libc suffix",
841 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
842
843 # install build key
844 if usign_key is not None:
845 factory.addStep(StringDownload(
846 name = "dlkeybuildpub",
847 s = UsignSec2Pub(usign_key, usign_comment),
848 workerdest = "key-build.pub",
849 mode = 0o600,
850 ))
851
852 factory.addStep(StringDownload(
853 name = "dlkeybuild",
854 s = "# fake private key",
855 workerdest = "key-build",
856 mode = 0o600,
857 ))
858
859 factory.addStep(StringDownload(
860 name = "dlkeybuilducert",
861 s = "# fake certificate",
862 workerdest = "key-build.ucert",
863 mode = 0o600,
864 ))
865
866 # prepare dl
867 factory.addStep(ShellCommand(
868 name = "dldir",
869 description = "Preparing dl/",
870 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
871 logEnviron = False,
872 want_stdout = False
873 ))
874
875 # prepare tar
876 factory.addStep(ShellCommand(
877 name = "dltar",
878 description = "Building and installing GNU tar",
879 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
880 env = MakeEnv(tryccache=True),
881 haltOnFailure = True
882 ))
883
884 # populate dl
885 factory.addStep(ShellCommand(
886 name = "dlrun",
887 description = "Populating dl/",
888 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
889 env = MakeEnv(),
890 logEnviron = False,
891 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
892 ))
893
894 factory.addStep(ShellCommand(
895 name = "cleanbase",
896 description = "Cleaning base-files",
897 command=["make", "package/base-files/clean", "V=s"]
898 ))
899
900 # build
901 factory.addStep(ShellCommand(
902 name = "tools",
903 description = "Building and installing tools",
904 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
905 env = MakeEnv(tryccache=True),
906 haltOnFailure = True
907 ))
908
909 factory.addStep(ShellCommand(
910 name = "toolchain",
911 description = "Building and installing toolchain",
912 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
913 env = MakeEnv(),
914 haltOnFailure = True
915 ))
916
917 factory.addStep(ShellCommand(
918 name = "kmods",
919 description = "Building kmods",
920 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
921 env = MakeEnv(),
922 haltOnFailure = True
923 ))
924
925 # find kernel version
926 factory.addStep(SetPropertyFromCommand(
927 name = "kernelversion",
928 property = "kernelversion",
929 description = "Finding the effective Kernel version",
930 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
931 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
932 ))
933
934 factory.addStep(ShellCommand(
935 name = "pkgclean",
936 description = "Cleaning up package build",
937 command=["make", "package/cleanup", "V=s"]
938 ))
939
940 factory.addStep(ShellCommand(
941 name = "pkgbuild",
942 description = "Building packages",
943 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
944 env = MakeEnv(),
945 haltOnFailure = True
946 ))
947
948 factory.addStep(ShellCommand(
949 name = "pkginstall",
950 description = "Installing packages",
951 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
952 env = MakeEnv(),
953 haltOnFailure = True
954 ))
955
956 factory.addStep(ShellCommand(
957 name = "pkgindex",
958 description = "Indexing packages",
959 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
960 env = MakeEnv(),
961 haltOnFailure = True
962 ))
963
964 factory.addStep(ShellCommand(
965 name = "images",
966 description = "Building and installing images",
967 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
968 env = MakeEnv(),
969 haltOnFailure = True
970 ))
971
972 factory.addStep(ShellCommand(
973 name = "buildinfo",
974 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
975 command = "make -j1 buildinfo V=s || true",
976 env = MakeEnv(),
977 haltOnFailure = True
978 ))
979
980 factory.addStep(ShellCommand(
981 name = "json_overview_image_info",
982 description = "Generate profiles.json in target folder",
983 command = "make -j1 json_overview_image_info V=s || true",
984 env = MakeEnv(),
985 haltOnFailure = True
986 ))
987
988 factory.addStep(ShellCommand(
989 name = "checksums",
990 description = "Calculating checksums",
991 command=["make", "-j1", "checksum", "V=s"],
992 env = MakeEnv(),
993 haltOnFailure = True
994 ))
995
996 if enable_kmod_archive:
997 factory.addStep(ShellCommand(
998 name = "kmoddir",
999 description = "Creating kmod directory",
1000 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1001 haltOnFailure = True
1002 ))
1003
1004 factory.addStep(ShellCommand(
1005 name = "kmodprepare",
1006 description = "Preparing kmod archive",
1007 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1008 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1009 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1010 haltOnFailure = True
1011 ))
1012
1013 factory.addStep(ShellCommand(
1014 name = "kmodindex",
1015 description = "Indexing kmod archive",
1016 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1017 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1018 env = MakeEnv(),
1019 haltOnFailure = True
1020 ))
1021
	# sign
	# Signing round-trip: the worker packs the index files into a tarball,
	# the tarball travels to the master (the only host holding the keys),
	# signall.sh signs it there, and the result is unpacked back on the
	# worker so the signatures get uploaded together with the images.
	if ini.has_option("gpg", "key") or usign_key is not None:
		# Master-side scratch directory shared by all builders; per-builder
		# tarball names below keep concurrent builds from colliding.
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		# Collect every sha256sums and Packages file (top two levels of the
		# image dir and the kmod archive) into sign.tar.gz on the worker.
		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
			haltOnFailure = True
		))

		# Ship the tarball to the master, keyed by <target>.<subtarget>.
		factory.addStep(FileUpload(
			workersrc = "sign.tar.gz",
			masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			haltOnFailure = True
		))

		# Sign in place on the master; signall.sh reads key material from the
		# same config.ini this master was started with.
		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		# Fetch the signed tarball back to the worker ...
		factory.addStep(FileDownload(
			name = "dlsigntargz",
			mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
			workerdest = "sign.tar.gz",
			haltOnFailure = True
		))

		# ... and unpack it over the build tree, adding the signature files
		# next to the originals.
		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))
1065
	# upload
	# Pre-create the remote directory layout under tmp/upload/ on the worker,
	# then rsync just that (empty) structure to the bin server so the later
	# per-file uploads have their destination directories in place.
	factory.addStep(ShellCommand(
		name = "dirprepare",
		description = "Preparing upload directory structure",
		command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		haltOnFailure = True
	))

	# For non-master (release) builds, point <prefix>packages at the shared
	# ../packages-<basever> tree instead of duplicating it per release.
	factory.addStep(ShellCommand(
		name = "linkprepare",
		description = "Preparing repository symlink",
		command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
		doStepIf = IsNoMasterBuild,
		haltOnFailure = True
	))

	if enable_kmod_archive:
		# Mirror the kmods/<kernelversion> directory in the upload skeleton.
		factory.addStep(ShellCommand(
			name = "kmoddirprepare",
			description = "Preparing kmod archive upload directory",
			command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			haltOnFailure = True
		))

	# Push the directory skeleton (and the symlink) to the bin server.
	# NetLockUl serialises uplink-heavy steps across builders; the rsync
	# password is kept out of the step log via logEnviron = False.
	factory.addStep(ShellCommand(
		name = "dirupload",
		description = "Uploading directory structure",
		command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))
1099
	# download remote sha256sums to 'target-sha256sums'
	# Delta-upload machinery: compare the server's sha256sums with the local
	# one and only transfer files that are new or changed.
	# Failure here is tolerated (flunk/warnOnFailure False) — on a first
	# upload the remote file does not exist yet and sha2rsync.pl below will
	# then treat everything as new.
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		logEnviron = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	))

	# build list of files to upload
	# Helper scripts live on the master; drop them one level above the build
	# directory so a full build-dir cleanup does not remove them mid-run.
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		workerdest = "../sha2rsync.pl",
		mode = 0o755,
	))

	# sha2rsync.pl diffs remote vs. local sha256sums and writes the names of
	# new/changed files to 'rsynclist' for --files-from below.
	factory.addStep(ShellCommand(
		name = "buildlist",
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,
	))

	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		workerdest = "../rsync.sh",
		mode = 0o755
	))

	# upload new files and update existing ones
	# Only files listed in rsynclist are sent; --delay-updates plus a
	# builder-specific --partial-dir keeps the remote tree consistent even
	# if the transfer is interrupted. kmods/ is handled separately below.
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
	))

	# delete files which don't exist locally
	# --existing + --ignore-existing skips both creating and updating files,
	# so combined with --delete this pass only removes remote files that are
	# gone locally.
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
		        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
		         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False,
		locks = NetLockUl,
	))
1158
	if enable_kmod_archive:
		# Sync the per-kernel-version kmod archive to the bin server.
		# Unlike targetupload this is a full --delete mirror of the
		# directory (no --files-from delta list).
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
			         Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1171
	# Optionally feed the source mirror: upload dl/ tarballs fetched during
	# this build (newer than .config) to the sources rsync module.
	if rsync_src_url is not None:
		# List regular, non-empty, non-hidden files, skipping *.hash/*.dl
		# bookkeeping files; one filename per line into 'sourcelist'.
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		# --size-only: source tarballs are immutable per filename, so size
		# comparison is enough and avoids re-uploading unchanged archives.
		# The partial-dir includes the worker name since many builders may
		# upload to the same mirror concurrently.
		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
			        [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,
			logEnviron = False,
			locks = NetLockUl,
		))
1190
	# Diagnostics: these three steps only report build-host state, run even
	# after failures (alwaysRun) and never affect the build result
	# (halt/flunk/warnOnFailure all False).
	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	factory.addStep(ShellCommand(
		name = "du",
		description = "Reporting estimated file space usage",
		command=["du", "-sh", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True
	))

	# ccache may be the build tree's own copy, hence staging_dir/host/bin
	# is appended to PATH for this step.
	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
		alwaysRun = True,
	))
1224
	# Register the finished factory as one builder per target and expose it
	# via a Triggerable scheduler, so the shared force scheduler can kick
	# off any subset of targets through the trigger steps added below.
	c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
	# Trigger step on the force-build factory; IsTargetSelected gates it on
	# the target chosen in the force form, and reason/tag are forwarded to
	# the triggered build.
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % target,
		description = "Triggering %s build" % target,
		schedulerNames = [ "trigger_%s" % target ],
		set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
		doStepIf = IsTargetSelected(target)
	))
1235
1236
1237 ####### STATUS TARGETS
1238
# Build results are reported through the services configured below: the
# buildbot web UI ('www', with waterfall/console/grid views, optionally
# password-protected) and, when configured, an IRC notifier ('services').
1242
# Web UI: serve the status frontend on the configured bind address and,
# when credentials are present, protect the control endpoints with a
# single "admins"-role account.
if "status_bind" in inip1:
	www = {
		'port': inip1.get("status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	if "status_user" in inip1 and "status_password" in inip1:
		status_user = inip1.get("status_user")
		www['auth'] = util.UserPasswordAuth([
			(status_user, inip1.get("status_password"))
		])
		www['authz'] = util.Authz(
			allowRules = [ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers = [ util.RolesFromUsername(roles=["admins"], usernames=[status_user]) ]
		)

	c['www'] = www
1261
# Optional IRC notifier: when the [irc] section provides at least a host,
# nickname and channel, announce exceptions, breakage and recovery there.
c['services'] = []
if ini.has_section("irc"):
	iniirc = ini['irc']
	irc_host = iniirc.get("host", None)
	irc_port = iniirc.getint("port", 6667)
	irc_chan = iniirc.get("channel", None)
	irc_nick = iniirc.get("nickname", None)
	irc_pass = iniirc.get("password", None)

	if irc_host and irc_nick and irc_chan:
		c['services'].append(reporters.IRC(
			irc_host,
			irc_nick,
			port = irc_port,
			password = irc_pass,
			channels = [ irc_chan ],
			notify_events = [ 'exception', 'problem', 'recovery' ]
		))
1280
# Turn openwrt.org git revision references into clickable cgit commit links.
c['revlink'] = util.RevlinkMatch(
	[r'https://git.openwrt.org/openwrt/(.*).git'],
	r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1285
####### DB URL

# Buildbot keeps its state in this database; the sqlite default is fine
# for all but the largest installations.
c['db'] = dict(db_url="sqlite:///state.sqlite")

# Opt out of sending anonymized usage statistics to the buildbot project.
c['buildbotNetUsageData'] = None