1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41
42
43 if not os.path.exists("twistd.pid"):
44 with open("twistd.pid", "w") as pidfile:
45 pidfile.write("{}".format(os.getpid()))
46
47 # This is the phase1 buildmaster config file. It must be installed as
48 # 'master.cfg' in your buildmaster's base directory.
49
50 ini = configparser.ConfigParser()
51 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
52
53 if "general" not in ini or "phase1" not in ini:
54 raise ValueError("config.ini must contain [general] and [phase1] sections")
55
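# Illustrative sketch (not parsed from this comment): a minimal config.ini
# providing the sections read in this file. Key names are the ones looked up
# below; every value is a placeholder.
#
#   [general]
#   title = Example buildbot
#   title_url = https://buildbot.example.org/
#   workdir = /srv/buildbot
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010
#   port = 9989
#
#   [repo]
#   url = https://git.example.org/openwrt/openwrt.git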
56 inip1 = ini['phase1']
57
58 # Globals
59 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
60 scripts_dir = os.path.abspath("../scripts")
61
62 repo_url = ini['repo'].get("url")
63
64 rsync_defopts = ["-v", "-4", "--timeout=120"]
65
66 #if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
67 # rsync_bin_defopts += ["--contimeout=20"]
68
69 branches = {}
70
71 def ini_parse_branch(section):
72 b = {}
73 name = section.get("name")
74
75 if not name:
76 raise ValueError("missing 'name' in " + repr(section))
77 if name in branches:
78 raise ValueError("duplicate branch name in " + repr(section))
79
80 b["name"] = name
81 b["bin_url"] = section.get("binary_url")
82 b["bin_key"] = section.get("binary_password")
83
84 b["src_url"] = section.get("source_url")
85 b["src_key"] = section.get("source_password")
86
87 b["gpg_key"] = section.get("gpg_key")
88
89 b["usign_key"] = section.get("usign_key")
90 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
91 b["usign_comment"] = section.get("usign_comment", usign_comment)
92
93 b["config_seed"] = section.get("config_seed")
94
95 b["kmod_archive"] = section.getboolean("kmod_archive", False)
96
97 branches[name] = b
98 log.msg("Configured branch: {}".format(name))
99
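# Illustrative sketch: a "[branch ...]" section as consumed by
# ini_parse_branch() above. Key names match the .get() calls; all values are
# placeholders.
#
#   [branch openwrt-21.02]
#   name = openwrt-21.02
#   binary_url = rsync://user@mirror.example.org/bin
#   binary_password = secret
#   source_url = rsync://user@mirror.example.org/src
#   source_password = secret
#   gpg_key = <signing key material>
#   usign_key = <base64-encoded usign secret key>
#   config_seed = CONFIG_DEVEL=y
#   kmod_archive = true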
100 # PB port can be either a numeric port or a connection string
101 pb_port = inip1.get("port") or 9989
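# e.g. "port = 9989" for a plain PB port; a Twisted connection string such as
# "tcp:9989:interface=127.0.0.1" should work here as well (illustrative).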
102
103 # This is the dictionary that the buildmaster pays attention to. We also use
104 # a shorter alias to save typing.
105 c = BuildmasterConfig = {}
106
107 ####### PROJECT IDENTITY
108
109 # the 'title' string will appear at the top of this buildbot
110 # installation's web UI home page (linked to the 'titleURL') and is
111 # embedded in the title of the pages it serves.
112
113 c['title'] = ini['general'].get("title")
114 c['titleURL'] = ini['general'].get("title_url")
115
116 # the 'buildbotURL' string should point to the location where the buildbot's
117 # internal web server is visible. This typically uses the port number set
118 # in the 'www' configuration below ('status_bind'), but with an
119 # externally-visible host name which the buildbot cannot figure out
120 # without some help.
121
122 c['buildbotURL'] = inip1.get("buildbot_url")
123
124 ####### BUILDWORKERS
125
126 # The 'workers' list defines the set of recognized build workers. Each element is
127 # a Worker object, specifying a unique worker name and password. The same
128 # worker name and password must be configured on the worker.
129
130 c['workers'] = []
131 NetLocks = dict()
132
133 for section in ini.sections():
134 if section.startswith("branch "):
135 ini_parse_branch(ini[section])
136
137 if section.startswith("worker "):
138 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
139 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
140 sl_props = { 'dl_lock':None, 'ul_lock':None }
141 name = ini.get(section, "name")
142 password = ini.get(section, "password")
143 if ini.has_option(section, "dl_lock"):
144 lockname = ini.get(section, "dl_lock")
145 sl_props['dl_lock'] = lockname
146 if lockname not in NetLocks:
147 NetLocks[lockname] = locks.MasterLock(lockname)
148 if ini.has_option(section, "ul_lock"):
149 lockname = ini.get(section, "ul_lock")
150 sl_props['ul_lock'] = lockname
151 if lockname not in NetLocks:
152 NetLocks[lockname] = locks.MasterLock(lockname)
153 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
154
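# Illustrative sketch: a "[worker ...]" section as parsed above. dl_lock and
# ul_lock are optional names of master-side network locks; workers naming the
# same lock share it. All values are placeholders.
#
#   [worker 1]
#   name = builder01
#   password = secret
#   phase = 1
#   dl_lock = dl-link-a
#   ul_lock = ul-link-a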
155 c['protocols'] = {'pb': {'port': pb_port}}
156
157 # coalesce builds
158 c['collapseRequests'] = True
159
160 # Reduce amount of backlog data
161 c['configurators'] = [util.JanitorConfigurator(
162 logHorizon=timedelta(days=3),
163 hour=6,
164 )]
165
166 @defer.inlineCallbacks
167 def getNewestCompleteTime(bldr):
168 """Returns the complete_at timestamp of the most recent completed and
169 not SKIPPED build request for this builder, or None if there is no such
170 build request. SKIPPED requests must be filtered out because
171 collapseRequests=True unfortunately marks all previous requests as
172 complete whenever a new buildset is created.
173
174 @returns: datetime instance or None, via Deferred
175 """
176
177 bldrid = yield bldr.getBuilderId()
178 completed = yield bldr.master.data.get(
179 ('builders', bldrid, 'buildrequests'),
180 [
181 resultspec.Filter('complete', 'eq', [True]),
182 resultspec.Filter('results', 'ne', [results.SKIPPED]),
183 ],
184 order=['-complete_at'], limit=1)
185 if not completed:
186 return
187
188 complete_at = completed[0]['complete_at']
189
190 last_build = yield bldr.master.data.get(
191 ('builds', ),
192 [
193 resultspec.Filter('builderid', 'eq', [bldrid]),
194 ],
195 order=['-started_at'], limit=1)
196
197 if last_build and last_build[0]:
198 last_complete_at = last_build[0]['complete_at']
199 if last_complete_at and (last_complete_at > complete_at):
200 return last_complete_at
201
202 return complete_at
203
204 @defer.inlineCallbacks
205 def prioritizeBuilders(master, builders):
206 """Returns the list of builders sorted by the completion timestamp of
207 their most recent completed and not skipped build.
208
209 @returns: list of sorted builders
210 """
211
212 def is_building(bldr):
213 return bool(bldr.building) or bool(bldr.old_building)
214
215 def bldr_info(bldr):
216 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
217 d.addCallback(lambda complete_at: (complete_at, bldr))
218 return d
219
220 def bldr_sort(item):
221 (complete_at, bldr) = item
222
223 if bldr.name == "00_force_build":
224 date = datetime.min
225 complete_at = date.replace(tzinfo=tzutc())
226 return (complete_at, bldr.name)
227
228 if not complete_at:
229 date = datetime.min
230 complete_at = date.replace(tzinfo=tzutc())
231
232 if is_building(bldr):
233 date = datetime.max
234 complete_at = date.replace(tzinfo=tzutc())
235
236 return (complete_at, bldr.name)
237
238 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
239 results.sort(key=bldr_sort)
240
241 for r in results:
242 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
243
244 return [r[1] for r in results]
245
246 c['prioritizeBuilders'] = prioritizeBuilders
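# Net effect of the sort above: "00_force_build" is pinned to datetime.min,
# idle builders that have never completed a build also sort with datetime.min,
# the remaining idle builders follow from oldest to newest completion time,
# and builders that are currently building are pushed to the end via
# datetime.max.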
247
248 ####### CHANGESOURCES
249
250 branchNames = [branches[b]["name"] for b in branches]
251
252 # find targets
253 targets = set()
254
255 def populateTargets():
256 log.msg("Populating targets, this will take time")
257 sourcegit = work_dir + '/source.git'
258 for branch in branchNames:
259 if os.path.isdir(sourcegit):
260 subprocess.call(["rm", "-rf", sourcegit])
261
262 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
263
264 os.makedirs(sourcegit + '/tmp', exist_ok=True)
265 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
266 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
267
268 while True:
269 line = findtargets.stdout.readline()
270 if not line:
271 break
272 ta = line.decode().strip().split(' ')
273 targets.add(ta[0])
274
275 subprocess.call(["rm", "-rf", sourcegit])
276
277 populateTargets()
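# dump-target-info.pl emits one line per target; only the first
# whitespace-separated field (the "target/subtarget" identifier, e.g.
# "ath79/generic") is collected into the targets set above.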
278
279 # the 'change_source' setting tells the buildmaster how it should find out
280 # about source code changes. Here we poll the configured repository for all configured branches.
281
282 c['change_source'] = []
283 c['change_source'].append(GitPoller(
284 repo_url,
285 workdir=work_dir+'/work.git', branches=branchNames,
286 pollAtLaunch=True, pollinterval=300))
287
288 ####### SCHEDULERS
289
290 # Configure the Schedulers, which decide how to react to incoming changes. In this
291 # case, kick off a build on every target builder whenever a configured branch changes.
292
293 class TagChoiceParameter(BaseParameter):
294 spec_attributes = ["strict", "choices"]
295 type = "list"
296 strict = True
297
298 def __init__(self, name, label=None, **kw):
299 super().__init__(name, label, **kw)
300 self._choice_list = []
301
302 @property
303 def choices(self):
304 taglist = []
305 branchvers = []
306
307 for b in branchNames:
308 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
309 if basever:
310 branchvers.append(basever[1])
311
312 alltags = subprocess.Popen(
313 ['git', 'ls-remote', '--tags', repo_url],
314 stdout = subprocess.PIPE)
315
316 while True:
317 line = alltags.stdout.readline()
318
319 if not line:
320 break
321
322 (ref, tag) = line.split()
323
324 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
325
326 # only list tags matching configured branches
327 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
328 taglist.append(tagver[1])
329
330 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
331 taglist.insert(0, '')
332
333 self._choice_list = taglist
334
335 return self._choice_list
336
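# Note on the sort above: suffixing non-rc tags with "-z" makes a final
# release (e.g. "v21.02.1") compare greater than its release candidates
# ("v21.02.1-rc1"), so with reverse=True finals are listed ahead of their
# rcs; the empty string inserted at index 0 is the default "no tag" choice.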
337 def updateFromKwargs(self, properties, kwargs, **unused):
338 tag = self.getFromKwargs(kwargs)
339 properties[self.name] = tag
340
341 # find the commit matching the tag
342 findrev = subprocess.Popen(['git', 'rev-parse', 'tags/'+tag], stdout=subprocess.PIPE, cwd=work_dir+'/work.git')
343 findrev.wait(timeout=10)
344 line = findrev.stdout.readline()
345
346 if findrev.returncode != 0 or not line:
347 raise ValidationError("Couldn't find tag")
348
349 properties['force_revision'] = line.decode().strip()
350
351 # find the branch matching the tag
352 branch = None
353 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
354 for b in branchNames:
355 if branchver and b.endswith(branchver[1]):
356 branch = b
357
358 if not branch:
359 raise ValidationError("Couldn't find branch")
360
361 properties['force_branch'] = branch
362
363 def parse_from_arg(self, s):
364 if self.strict and s not in self._choice_list:
365 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
366 return s
367
368 c['schedulers'] = []
369 c['schedulers'].append(AnyBranchScheduler(
370 name = "all",
371 change_filter = util.ChangeFilter(branch=branchNames),
372 treeStableTimer = 15*60,
373 builderNames = list(targets)))
374
375 c['schedulers'].append(ForceScheduler(
376 name = "force",
377 buttonName = "Force builds",
378 label = "Force build details",
379 builderNames = [ "00_force_build" ],
380
381 codebases = [
382 util.CodebaseParameter(
383 "",
384 label = "Repository",
385 branch = util.FixedParameter(name = "branch", default = ""),
386 revision = util.FixedParameter(name = "revision", default = ""),
387 repository = util.FixedParameter(name = "repository", default = ""),
388 project = util.FixedParameter(name = "project", default = "")
389 )
390 ],
391
392 reason = util.StringParameter(
393 name = "reason",
394 label = "Reason",
395 default = "Trigger build",
396 required = True,
397 size = 80
398 ),
399
400 properties = [
401 util.ChoiceStringParameter(
402 name = "target",
403 label = "Build target",
404 default = "all",
405 choices = [ "all" ] + list(targets)
406 ),
407 TagChoiceParameter(
408 name = "tag",
409 label = "Build tag",
410 default = ""
411 )
412 ]
413 ))
414
415 ####### BUILDERS
416
417 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
418 # what steps, and which workers can execute them. Note that any particular build will
419 # only take place on one worker.
420
421 def IsNoMasterBuild(step):
422 return step.getProperty("branch") != "master"
423
424 def IsUsignEnabled(step):
425 branch = step.getProperty("branch")
426 return branch and branches[branch].get("usign_key")
427
428 def IsSignEnabled(step):
429 branch = step.getProperty("branch")
430 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
431
432 def IsKmodArchiveEnabled(step):
433 branch = step.getProperty("branch")
434 return branch and branches[branch].get("kmod_archive")
435
436 def GetBaseVersion(branch):
437 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
438 return branch.split('-')[1]
439 else:
440 return "master"
441
442 @properties.renderer
443 def GetVersionPrefix(props):
444 branch = props.getProperty("branch")
445 basever = GetBaseVersion(branch)
446 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
447 return "%s/" % props["tag"][1:]
448 elif basever != "master":
449 return "%s-SNAPSHOT/" % basever
450 else:
451 return ""
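# Examples (illustrative): GetBaseVersion("openwrt-21.02") returns "21.02" and
# GetBaseVersion("master") returns "master"; GetVersionPrefix then renders to
# "21.02.0/" for a build forced from tag v21.02.0, "21.02-SNAPSHOT/" for an
# ordinary release-branch build, and "" for master.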
452
453 @util.renderer
454 def GetConfigSeed(props):
455 branch = props.getProperty("branch")
456 return branch and branches[branch].get("config_seed") or ""
457
458 @util.renderer
459 def GetRsyncParams(props, srcorbin, urlorkey):
460 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
461 branch = props.getProperty("branch")
462 opt = srcorbin + "_" + urlorkey
463 return branch and branches[branch].get(opt)
464
465 @util.renderer
466 def GetUsignKey(props):
467 branch = props.getProperty("branch")
468 return branch and branches[branch].get("usign_key")
469
470 def GetNextBuild(builder, requests):
471 for r in requests:
472 if r.properties:
473 # order tagged build first
474 if r.properties.hasProperty("tag"):
475 return r
476 # then order by branch order
477 pbranch = r.properties.getProperty("branch")
478 for name in branchNames:
479 if pbranch == name:
480 return r
481
482 r = requests[0]
483 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
484 return r
485
486 def MakeEnv(overrides=None, tryccache=False):
487 env = {
488 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
489 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
490 }
491 if tryccache:
492 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
493 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
494 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
495 else:
496 env['CC'] = env['CCC']
497 env['CXX'] = env['CCXX']
498 env['CCACHE'] = ''
499 if overrides is not None:
500 env.update(overrides)
501 return env
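# Illustrative usage: MakeEnv(tryccache=True) points CC/CXX at the
# ccache_cc.sh / ccache_cxx.sh wrapper scripts installed into the build
# directory further below, while plain MakeEnv() uses the detected
# cc_command / cxx_command directly and leaves CCACHE empty.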
502
503 @properties.renderer
504 def NetLockDl(props, extralock=None):
505 lock = None
506 locks = []
507 if props.hasProperty("dl_lock"):
508 lock = NetLocks[props["dl_lock"]]
509 if lock is not None:
510 locks.append(lock.access('exclusive'))
511 if extralock is not None:
512 locks.append(extralock)
513 return locks
514
515 @properties.renderer
516 def NetLockUl(props):
517 lock = None
518 if props.hasProperty("ul_lock"):
519 lock = NetLocks[props["ul_lock"]]
520 if lock is not None:
521 return [lock.access('exclusive')]
522 else:
523 return []
524
525 def IsTargetSelected(target):
526 def CheckTargetProperty(step):
527 selected_target = step.getProperty("target", "all")
528 if selected_target != "all" and selected_target != target:
529 return False
530 return True
531
532 return CheckTargetProperty
533
534 @util.renderer
535 def UsignSec2Pub(props):
536 branch = props.getProperty("branch")
537 try:
538 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
539 seckey = branches[branch].get("usign_key")
540 seckey = base64.b64decode(seckey)
541 except Exception:
542 return None
543
544 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
545 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
546
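# Hedged note on UsignSec2Pub above: the comment has "secret key" replaced by
# "public key", and the byte slices appear to pick the signature-algorithm id,
# the key id and the public key material out of the base64-decoded secret key
# (a reading of the usign key layout, not verified here).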
547
548 c['builders'] = []
549
550 dlLock = locks.WorkerLock("worker_dl")
551
552 workerNames = [ ]
553
554 for worker in c['workers']:
555 workerNames.append(worker.workername)
556
557 force_factory = BuildFactory()
558
559 c['builders'].append(BuilderConfig(
560 name = "00_force_build",
561 workernames = workerNames,
562 factory = force_factory))
563
564 for target in targets:
565 ts = target.split('/')
566
567 factory = BuildFactory()
568
569 # setup shared work directory if required
570 factory.addStep(ShellCommand(
571 name = "sharedwd",
572 descriptionDone = "Shared work directory set up",
573 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
574 workdir = ".",
575 haltOnFailure = True,
576 ))
577
578 # find number of cores
579 factory.addStep(SetPropertyFromCommand(
580 name = "nproc",
581 property = "nproc",
582 description = "Finding number of CPUs",
583 command = ["nproc"],
584 ))
585
586 # find gcc and g++ compilers
587 factory.addStep(FileDownload(
588 name = "dlfindbinpl",
589 mastersrc = scripts_dir + '/findbin.pl',
590 workerdest = "../findbin.pl",
591 mode = 0o755,
592 ))
593
594 factory.addStep(SetPropertyFromCommand(
595 name = "gcc",
596 property = "cc_command",
597 description = "Finding gcc command",
598 command = ["../findbin.pl", "gcc", "", ""],
599 haltOnFailure = True,
600 ))
601
602 factory.addStep(SetPropertyFromCommand(
603 name = "g++",
604 property = "cxx_command",
605 description = "Finding g++ command",
606 command = ["../findbin.pl", "g++", "", ""],
607 haltOnFailure = True,
608 ))
609
610 # see if ccache is available
611 factory.addStep(SetPropertyFromCommand(
612 name = "ccache",
613 property = "ccache_command",
614 description = "Testing for ccache command",
615 command = ["which", "ccache"],
616 haltOnFailure = False,
617 flunkOnFailure = False,
618 warnOnFailure = False,
619 hideStepIf = lambda r, s: r==results.FAILURE,
620 ))
621
622 # check out the source
623 # Git() runs:
624 # if repo doesn't exist: 'git clone repourl'
625 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
626 # git cat-file -e <commit>
627 # git checkout -f <commit>
628 # git checkout -B <branch>
629 # git rev-parse HEAD
630 factory.addStep(Git(
631 name = "git",
632 repourl = repo_url,
633 mode = 'full',
634 method = 'fresh',
635 locks = NetLockDl,
636 haltOnFailure = True,
637 ))
638
639 # update remote refs
640 factory.addStep(ShellCommand(
641 name = "fetchrefs",
642 description = "Fetching Git remote refs",
643 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
644 haltOnFailure = True,
645 ))
646
647 # Verify that Git HEAD points to a tag or branch
648 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
649 factory.addStep(ShellCommand(
650 name = "gitverify",
651 description = "Ensure that Git HEAD is pointing to a branch or tag",
652 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
653 haltOnFailure = True,
654 ))
655
656 factory.addStep(ShellCommand(
657 name = "rmtmp",
658 description = "Remove tmp folder",
659 command=["rm", "-rf", "tmp/"],
660 ))
661
662 # feed
663 factory.addStep(ShellCommand(
664 name = "rmfeedlinks",
665 description = "Remove feed symlinks",
666 command=["rm", "-rf", "package/feeds/"],
667 ))
668
669 factory.addStep(StringDownload(
670 name = "ccachecc",
671 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
672 workerdest = "../ccache_cc.sh",
673 mode = 0o755,
674 ))
675
676 factory.addStep(StringDownload(
677 name = "ccachecxx",
678 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
679 workerdest = "../ccache_cxx.sh",
680 mode = 0o755,
681 ))
682
683 # feed
684 factory.addStep(ShellCommand(
685 name = "updatefeeds",
686 description = "Updating feeds",
687 command=["./scripts/feeds", "update"],
688 env = MakeEnv(tryccache=True),
689 haltOnFailure = True,
690 locks = NetLockDl,
691 ))
692
693 # feed
694 factory.addStep(ShellCommand(
695 name = "installfeeds",
696 description = "Installing feeds",
697 command=["./scripts/feeds", "install", "-a"],
698 env = MakeEnv(tryccache=True),
699 haltOnFailure = True,
700 ))
701
702 # seed config
703 factory.addStep(StringDownload(
704 name = "dlconfigseed",
705 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
706 workerdest = ".config",
707 mode = 0o644,
708 ))
709
710 # configure
711 factory.addStep(ShellCommand(
712 name = "newconfig",
713 descriptionDone = ".config seeded",
714 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
715 ))
716
717 factory.addStep(ShellCommand(
718 name = "delbin",
719 description = "Removing output directory",
720 command = ["rm", "-rf", "bin/"],
721 ))
722
723 factory.addStep(ShellCommand(
724 name = "defconfig",
725 description = "Populating .config",
726 command = ["make", "defconfig"],
727 env = MakeEnv(),
728 ))
729
730 # check arch - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_<target>_<subtarget>
731 factory.addStep(ShellCommand(
732 name = "checkarch",
733 description = "Checking architecture",
734 descriptionDone = "Architecture validated",
735 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
736 logEnviron = False,
737 want_stdout = False,
738 want_stderr = False,
739 haltOnFailure = True,
740 flunkOnFailure = False, # this is not a build FAILURE
741 ))
742
743 # find libc suffix
744 factory.addStep(SetPropertyFromCommand(
745 name = "libc",
746 property = "libc",
747 description = "Finding libc suffix",
748 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
749 ))
750
751 # install build key
752 factory.addStep(StringDownload(
753 name = "dlkeybuildpub",
754 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
755 workerdest = "key-build.pub",
756 mode = 0o600,
757 doStepIf = IsUsignEnabled,
758 ))
759
760 factory.addStep(StringDownload(
761 name = "dlkeybuild",
762 s = "# fake private key",
763 workerdest = "key-build",
764 mode = 0o600,
765 doStepIf = IsUsignEnabled,
766 ))
767
768 factory.addStep(StringDownload(
769 name = "dlkeybuilducert",
770 s = "# fake certificate",
771 workerdest = "key-build.ucert",
772 mode = 0o600,
773 doStepIf = IsUsignEnabled,
774 ))
775
776 # prepare dl
777 factory.addStep(ShellCommand(
778 name = "dldir",
779 description = "Preparing dl/",
780 descriptionDone = "dl/ prepared",
781 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
782 workdir = Property("builddir"),
783 logEnviron = False,
784 want_stdout = False,
785 ))
786
787 # prepare tar
788 factory.addStep(ShellCommand(
789 name = "dltar",
790 description = "Building and installing GNU tar",
791 descriptionDone = "GNU tar built and installed",
792 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
793 env = MakeEnv(tryccache=True),
794 haltOnFailure = True,
795 ))
796
797 # populate dl
798 factory.addStep(ShellCommand(
799 name = "dlrun",
800 description = "Populating dl/",
801 descriptionDone = "dl/ populated",
802 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
803 env = MakeEnv(),
804 logEnviron = False,
805 locks = NetLockDl.withArgs(dlLock.access('exclusive')),
806 ))
807
808 factory.addStep(ShellCommand(
809 name = "cleanbase",
810 description = "Cleaning base-files",
811 command=["make", "package/base-files/clean", "V=s"],
812 ))
813
814 # build
815 factory.addStep(ShellCommand(
816 name = "tools",
817 description = "Building and installing tools",
818 descriptionDone = "Tools built and installed",
819 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
820 env = MakeEnv(tryccache=True),
821 haltOnFailure = True,
822 ))
823
824 factory.addStep(ShellCommand(
825 name = "toolchain",
826 description = "Building and installing toolchain",
827 descriptionDone = "Toolchain built and installed",
828 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
829 env = MakeEnv(),
830 haltOnFailure = True,
831 ))
832
833 factory.addStep(ShellCommand(
834 name = "kmods",
835 description = "Building kmods",
836 descriptionDone = "Kmods built",
837 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
838 env = MakeEnv(),
839 haltOnFailure = True,
840 ))
841
842 # find kernel version
843 factory.addStep(SetPropertyFromCommand(
844 name = "kernelversion",
845 property = "kernelversion",
846 description = "Finding the effective Kernel version",
847 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
848 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
849 ))
850
851 factory.addStep(ShellCommand(
852 name = "pkgclean",
853 description = "Cleaning up package build",
854 descriptionDone = "Package build cleaned up",
855 command=["make", "package/cleanup", "V=s"],
856 ))
857
858 factory.addStep(ShellCommand(
859 name = "pkgbuild",
860 description = "Building packages",
861 descriptionDone = "Packages built",
862 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
863 env = MakeEnv(),
864 haltOnFailure = True,
865 ))
866
867 factory.addStep(ShellCommand(
868 name = "pkginstall",
869 description = "Installing packages",
870 descriptionDone = "Packages installed",
871 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
872 env = MakeEnv(),
873 haltOnFailure = True,
874 ))
875
876 factory.addStep(ShellCommand(
877 name = "pkgindex",
878 description = "Indexing packages",
879 descriptionDone = "Packages indexed",
880 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
881 env = MakeEnv(),
882 haltOnFailure = True,
883 ))
884
885 factory.addStep(ShellCommand(
886 name = "images",
887 description = "Building and installing images",
888 descriptionDone = "Images built and installed",
889 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
890 env = MakeEnv(),
891 haltOnFailure = True,
892 ))
893
894 factory.addStep(ShellCommand(
895 name = "buildinfo",
896 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
897 command = "make -j1 buildinfo V=s || true",
898 env = MakeEnv(),
899 haltOnFailure = True,
900 ))
901
902 factory.addStep(ShellCommand(
903 name = "json_overview_image_info",
904 description = "Generating profiles.json in target folder",
905 command = "make -j1 json_overview_image_info V=s || true",
906 env = MakeEnv(),
907 haltOnFailure = True,
908 ))
909
910 factory.addStep(ShellCommand(
911 name = "checksums",
912 description = "Calculating checksums",
913 descriptionDone = "Checksums calculated",
914 command=["make", "-j1", "checksum", "V=s"],
915 env = MakeEnv(),
916 haltOnFailure = True,
917 ))
918
919 factory.addStep(ShellCommand(
920 name = "kmoddir",
921 descriptionDone = "Kmod directory created",
922 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
923 haltOnFailure = True,
924 doStepIf = IsKmodArchiveEnabled,
925 ))
926
927 factory.addStep(ShellCommand(
928 name = "kmodprepare",
929 description = "Preparing kmod archive",
930 descriptionDone = "Kmod archive prepared",
931 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
932 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
933 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
934 haltOnFailure = True,
935 doStepIf = IsKmodArchiveEnabled,
936 ))
937
938 factory.addStep(ShellCommand(
939 name = "kmodindex",
940 description = "Indexing kmod archive",
941 descriptionDone = "Kmod archive indexed",
942 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
943 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
944 env = MakeEnv(),
945 haltOnFailure = True,
946 doStepIf = IsKmodArchiveEnabled,
947 ))
948
949 # sign
950 factory.addStep(MasterShellCommand(
951 name = "signprepare",
952 descriptionDone = "Temporary signing directory prepared",
953 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
954 haltOnFailure = True,
955 doStepIf = IsSignEnabled,
956
957 ))
958
959 factory.addStep(ShellCommand(
960 name = "signpack",
961 description = "Packing files to sign",
962 descriptionDone = "Files to sign packed",
963 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
964 haltOnFailure = True,
965 doStepIf = IsSignEnabled,
966 ))
967
968 factory.addStep(FileUpload(
969 workersrc = "sign.tar.gz",
970 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
971 haltOnFailure = True,
972 doStepIf = IsSignEnabled,
973 ))
974
975 factory.addStep(MasterShellCommand(
976 name = "signfiles",
977 description = "Signing files",
978 descriptionDone = "Files signed",
979 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
980 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
981 haltOnFailure = True,
982 doStepIf = IsSignEnabled,
983 ))
984
985 factory.addStep(FileDownload(
986 name = "dlsigntargz",
987 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
988 workerdest = "sign.tar.gz",
989 haltOnFailure = True,
990 doStepIf = IsSignEnabled,
991 ))
992
993 factory.addStep(ShellCommand(
994 name = "signunpack",
995 description = "Unpacking signed files",
996 descriptionDone = "Signed files unpacked",
997 command = ["tar", "-xzf", "sign.tar.gz"],
998 haltOnFailure = True,
999 doStepIf = IsSignEnabled,
1000 ))
1001
1002 # upload
1003 factory.addStep(ShellCommand(
1004 name = "dirprepare",
1005 descriptionDone = "Upload directory structure prepared",
1006 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1007 haltOnFailure = True,
1008 ))
1009
1010 factory.addStep(ShellCommand(
1011 name = "linkprepare",
1012 descriptionDone = "Repository symlink prepared",
1013 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1014 doStepIf = IsNoMasterBuild,
1015 haltOnFailure = True,
1016 ))
1017
1018 factory.addStep(ShellCommand(
1019 name = "kmoddirprepare",
1020 descriptionDone = "Kmod archive upload directory prepared",
1021 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1022 haltOnFailure = True,
1023 doStepIf = IsKmodArchiveEnabled,
1024 ))
1025
1026 factory.addStep(ShellCommand(
1027 name = "dirupload",
1028 description = "Uploading directory structure",
1029 descriptionDone = "Directory structure uploaded",
1030 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1031 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1032 haltOnFailure = True,
1033 logEnviron = False,
1034 locks = NetLockUl,
1035 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1036 ))
1037
1038 # download remote sha256sums to 'target-sha256sums'
1039 factory.addStep(ShellCommand(
1040 name = "target-sha256sums",
1041 description = "Fetching remote sha256sums for target",
1042 descriptionDone = "Remote sha256sums for target fetched",
1043 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1044 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1045 logEnviron = False,
1046 haltOnFailure = False,
1047 flunkOnFailure = False,
1048 warnOnFailure = False,
1049 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1050 ))
1051
1052 # build list of files to upload
1053 factory.addStep(FileDownload(
1054 name = "dlsha2rsyncpl",
1055 mastersrc = scripts_dir + '/sha2rsync.pl',
1056 workerdest = "../sha2rsync.pl",
1057 mode = 0o755,
1058 ))
1059
1060 factory.addStep(ShellCommand(
1061 name = "buildlist",
1062 description = "Building list of files to upload",
1063 descriptionDone = "List of files to upload built",
1064 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1065 haltOnFailure = True,
1066 ))
1067
1068 factory.addStep(FileDownload(
1069 name = "dlrsync.sh",
1070 mastersrc = scripts_dir + '/rsync.sh',
1071 workerdest = "../rsync.sh",
1072 mode = 0o755,
1073 ))
1074
1075 # upload new files and update existing ones
1076 factory.addStep(ShellCommand(
1077 name = "targetupload",
1078 description = "Uploading target files",
1079 descriptionDone = "Target files uploaded",
1080 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1081 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1082 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1083 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1084 haltOnFailure = True,
1085 logEnviron = False,
1086 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1087 ))
1088
1089 # delete files which don't exist locally
1090 factory.addStep(ShellCommand(
1091 name = "targetprune",
1092 description = "Pruning target files",
1093 descriptionDone = "Target files pruned",
1094 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1095 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1096 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1097 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1098 haltOnFailure = True,
1099 logEnviron = False,
1100 locks = NetLockUl,
1101 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1102 ))
1103
1104 factory.addStep(ShellCommand(
1105 name = "kmodupload",
1106 description = "Uploading kmod archive",
1107 descriptionDone = "Kmod archive uploaded",
1108 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1109 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1110 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1111 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1112 haltOnFailure = True,
1113 logEnviron = False,
1114 locks = NetLockUl,
1115 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
1116 ))
1117
1118 factory.addStep(ShellCommand(
1119 name = "sourcelist",
1120 description = "Finding source archives to upload",
1121 descriptionDone = "Source archives to upload found",
1122 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1123 haltOnFailure = True,
1124 ))
1125
1126 factory.addStep(ShellCommand(
1127 name = "sourceupload",
1128 description = "Uploading source archives",
1129 descriptionDone = "Source archives uploaded",
1130 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1131 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1132 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1133 haltOnFailure = True,
1134 logEnviron = False,
1135 locks = NetLockUl,
1136 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1137 ))
1138
1139 factory.addStep(ShellCommand(
1140 name = "df",
1141 description = "Reporting disk usage",
1142 command=["df", "-h", "."],
1143 env={'LC_ALL': 'C'},
1144 logEnviron = False,
1145 haltOnFailure = False,
1146 flunkOnFailure = False,
1147 warnOnFailure = False,
1148 alwaysRun = True,
1149 ))
1150
1151 factory.addStep(ShellCommand(
1152 name = "du",
1153 description = "Reporting estimated file space usage",
1154 command=["du", "-sh", "."],
1155 env={'LC_ALL': 'C'},
1156 logEnviron = False,
1157 haltOnFailure = False,
1158 flunkOnFailure = False,
1159 warnOnFailure = False,
1160 alwaysRun = True,
1161 ))
1162
1163 factory.addStep(ShellCommand(
1164 name = "ccachestat",
1165 description = "Reporting ccache stats",
1166 command=["ccache", "-s"],
1167 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1168 logEnviron = False,
1169 want_stderr = False,
1170 haltOnFailure = False,
1171 flunkOnFailure = False,
1172 warnOnFailure = False,
1173 hideStepIf = lambda r, s: r==results.FAILURE,
1174 ))
1175
1176 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1177
1178 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1179 force_factory.addStep(steps.Trigger(
1180 name = "trigger_%s" % target,
1181 description = "Triggering %s build" % target,
1182 schedulerNames = [ "trigger_%s" % target ],
1183 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
1184 set_properties = { "reason": Property("reason"), "tag": Property("tag"), },
1185 doStepIf = IsTargetSelected(target),
1186 ))
1187
1188
1189 ####### STATUS TARGETS
1190
1191 # 'status targets' describe where build results are published. Here they
1192 # are the web UI configured under 'www' and reporter services such as the
1193 # IRC bot configured below.
1194
1195 if "status_bind" in inip1:
1196 c['www'] = {
1197 'port': inip1.get("status_bind"),
1198 'plugins': {
1199 'waterfall_view': True,
1200 'console_view': True,
1201 'grid_view': True
1202 }
1203 }
1204
1205 if "status_user" in inip1 and "status_password" in inip1:
1206 c['www']['auth'] = util.UserPasswordAuth([
1207 (inip1.get("status_user"), inip1.get("status_password"))
1208 ])
1209 c['www']['authz'] = util.Authz(
1210 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1211 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1212 )
1213
1214 c['services'] = []
1215 if ini.has_section("irc"):
1216 iniirc = ini['irc']
1217 irc_host = iniirc.get("host", None)
1218 irc_port = iniirc.getint("port", 6667)
1219 irc_chan = iniirc.get("channel", None)
1220 irc_nick = iniirc.get("nickname", None)
1221 irc_pass = iniirc.get("password", None)
1222
1223 if irc_host and irc_nick and irc_chan:
1224 irc = reporters.IRC(irc_host, irc_nick,
1225 port = irc_port,
1226 password = irc_pass,
1227 channels = [ irc_chan ],
1228 notify_events = [ 'exception', 'problem', 'recovery' ]
1229 )
1230
1231 c['services'].append(irc)
1232
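# Illustrative sketch: an "[irc]" section as read above; values are
# placeholders.
#
#   [irc]
#   host = irc.example.org
#   port = 6667
#   channel = #openwrt-builds
#   nickname = builder-announce
#   password = secret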
1233 c['revlink'] = util.RevlinkMatch([
1234 r'https://git.openwrt.org/openwrt/(.*).git'
1235 ],
1236 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1237
1238 ####### DB URL
1239
1240 c['db'] = {
1241 # This specifies what database buildbot uses to store its state. You can leave
1242 # this at its default for all but the largest installations.
1243 'db_url' : "sqlite:///state.sqlite",
1244 }
1245
1246 c['buildbotNetUsageData'] = None