phase1: prune unused files from dl/
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41
42
43 if not os.path.exists("twistd.pid"):
44 with open("twistd.pid", "w") as pidfile:
45 pidfile.write("{}".format(os.getpid()))
46
47 # This is the phase1 buildmaster config file. It must be installed as
48 # 'master.cfg' in the buildmaster's base directory.
49
50 ini = configparser.ConfigParser()
51 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
52
53 if "general" not in ini or "phase1" not in ini:
54 raise ValueError("Fix your configuration")
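# An illustrative config.ini sketch for the sections read below; the key names
# mirror the lookups in this file, the values are placeholders:
#
#   [general]
#   title = Example buildbot
#   title_url = https://buildbot.example.org/
#   workdir = /data/buildbot
#
#   [phase1]
#   port = 9989
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010
#   status_user = admin
#   status_password = secret
#
#   [repo]
#   url = https://git.example.org/openwrt.git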
55
56 inip1 = ini['phase1']
57
58 # Globals
59 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
60 scripts_dir = os.path.abspath("../scripts")
61
62 repo_url = ini['repo'].get("url")
63
64 rsync_defopts = ["-v", "-4", "--timeout=120"]
65
66 # if the remote binary/source URLs use the rsync protocol ("host::module" or "rsync://..."),
67 # "--contimeout=20" could additionally be appended to rsync_defopts
68
69 branches = {}
70
71 def ini_parse_branch(section):
72 b = {}
73 name = section.get("name")
74
75 if not name:
76 raise ValueError("missing 'name' in " + repr(section))
77 if name in branches:
78 raise ValueError("duplicate branch name in " + repr(section))
79
80 b["name"] = name
81 b["bin_url"] = section.get("binary_url")
82 b["bin_key"] = section.get("binary_password")
83
84 b["src_url"] = section.get("source_url")
85 b["src_key"] = section.get("source_password")
86
87 b["gpg_key"] = section.get("gpg_key")
88
89 b["usign_key"] = section.get("usign_key")
90 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
91 b["usign_comment"] = section.get("usign_comment", usign_comment)
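# e.g. a hypothetical branch name "foo-21.02" would yield the default comment
# "untrusted comment: Foo 21.02 key"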
92
93 b["config_seed"] = section.get("config_seed")
94
95 b["kmod_archive"] = section.getboolean("kmod_archive", False)
96
97 branches[name] = b
98 log.msg("Configured branch: {}".format(name))
99
100 # PB port can be either a numeric port or a connection string
101 pb_port = inip1.get("port") or 9989
102
103 # This is the dictionary that the buildmaster pays attention to. We also use
104 # a shorter alias to save typing.
105 c = BuildmasterConfig = {}
106
107 ####### PROJECT IDENTITY
108
109 # the 'title' string will appear at the top of this buildbot
110 # installation's web UI home page (linked to the
111 # 'titleURL') and is embedded in the HTML page titles.
112
113 c['title'] = ini['general'].get("title")
114 c['titleURL'] = ini['general'].get("title_url")
115
116 # the 'buildbotURL' string should point to the location where the buildbot's
117 # internal web server (the web UI) is visible. This
118 # typically uses the port number set in 'status_bind' below, but
119 # with an externally-visible host name which the buildbot cannot figure out
120 # without some help.
121
122 c['buildbotURL'] = inip1.get("buildbot_url")
123
124 ####### BUILDWORKERS
125
126 # The 'workers' list defines the set of recognized workers. Each element is
127 # a Worker object, specifying a unique worker name and password. The same
128 # worker name and password must be configured on the worker.
129
130 c['workers'] = []
131 NetLocks = dict()
132
133 for section in ini.sections():
134 if section.startswith("branch "):
135 ini_parse_branch(ini[section])
136
137 if section.startswith("worker "):
138 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
139 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
140 sl_props = { 'dl_lock':None, 'ul_lock':None }
141 name = ini.get(section, "name")
142 password = ini.get(section, "password")
143 if ini.has_option(section, "dl_lock"):
144 lockname = ini.get(section, "dl_lock")
145 sl_props['dl_lock'] = lockname
146 if lockname not in NetLocks:
147 NetLocks[lockname] = locks.MasterLock(lockname)
148 if ini.has_option(section, "ul_lock"):
149 lockname = ini.get(section, "ul_lock")
150 sl_props['ul_lock'] = lockname
151 if lockname not in NetLocks:
152 NetLocks[lockname] = locks.MasterLock(lockname)
153 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
154
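# The branch and worker sections parsed above look roughly like this
# (an illustrative sketch, values are placeholders):
#
#   [branch foo]
#   name = foo-21.02
#   binary_url = user@mirror::bin-upload
#   binary_password = secret
#   source_url = user@mirror::src-upload
#   source_password = secret
#   gpg_key = <ascii-armored signing key>
#   usign_key = <base64 usign secret key>
#   config_seed = <extra .config lines>
#   kmod_archive = yes
#
#   [worker 1]
#   name = worker-01
#   password = secret
#   phase = 1
#   dl_lock = dl-mirror1
#   ul_lock = ul-mirror1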
155 c['protocols'] = {'pb': {'port': pb_port}}
156
157 # coalesce builds
158 c['collapseRequests'] = True
159
160 # Reduce amount of backlog data
161 c['configurators'] = [util.JanitorConfigurator(
162 logHorizon=timedelta(days=3),
163 hour=6,
164 )]
165
166 @defer.inlineCallbacks
167 def getNewestCompleteTime(bldr):
168 """Returns the complete_at of the latest completed and not SKIPPED
169 build request for this builder, or None if there are no such build
170 requests. We need to filter out SKIPPED requests because we're
171 using collapseRequests=True, which unfortunately marks all
172 previous requests as complete when a new buildset is created.
173
174 @returns: datetime instance or None, via Deferred
175 """
176
177 bldrid = yield bldr.getBuilderId()
178 completed = yield bldr.master.data.get(
179 ('builders', bldrid, 'buildrequests'),
180 [
181 resultspec.Filter('complete', 'eq', [True]),
182 resultspec.Filter('results', 'ne', [results.SKIPPED]),
183 ],
184 order=['-complete_at'], limit=1)
185 if not completed:
186 return
187
188 complete_at = completed[0]['complete_at']
189
190 last_build = yield bldr.master.data.get(
191 ('builds', ),
192 [
193 resultspec.Filter('builderid', 'eq', [bldrid]),
194 ],
195 order=['-started_at'], limit=1)
196
197 if last_build and last_build[0]:
198 last_complete_at = last_build[0]['complete_at']
199 if last_complete_at and (last_complete_at > complete_at):
200 return last_complete_at
201
202 return complete_at
203
204 @defer.inlineCallbacks
205 def prioritizeBuilders(master, builders):
206 """Returns the list of builders sorted by the timestamp of their most recent
207 completed, non-skipped build.
208
209 @returns: list of sorted builders
210 """
211
212 def is_building(bldr):
213 return bool(bldr.building) or bool(bldr.old_building)
214
215 def bldr_info(bldr):
216 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
217 d.addCallback(lambda complete_at: (complete_at, bldr))
218 return d
219
220 def bldr_sort(item):
221 (complete_at, bldr) = item
222
223 if bldr.name == "00_force_build":
224 date = datetime.min
225 complete_at = date.replace(tzinfo=tzutc())
226 return (complete_at, bldr.name)
227
228 if not complete_at:
229 date = datetime.min
230 complete_at = date.replace(tzinfo=tzutc())
231
232 if is_building(bldr):
233 date = datetime.max
234 complete_at = date.replace(tzinfo=tzutc())
235
236 return (complete_at, bldr.name)
237
238 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
239 results.sort(key=bldr_sort)
240
241 for r in results:
242 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
243
244 return [r[1] for r in results]
245
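# Illustrative outcome of the sort above: "00_force_build" always comes first,
# idle builders follow ordered by their oldest completed build (never-built
# ones sort like datetime.min), and builders that are currently building go
# last (datetime.max).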
246 c['prioritizeBuilders'] = prioritizeBuilders
247
248 ####### CHANGESOURCES
249
250 branchNames = [branches[b]["name"] for b in branches]
251
252 # find targets
253 targets = set()
254
255 def populateTargets():
256 log.msg("Populating targets, this will take time")
257 sourcegit = work_dir + '/source.git'
258 for branch in branchNames:
259 if os.path.isdir(sourcegit):
260 subprocess.call(["rm", "-rf", sourcegit])
261
262 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
263
264 os.makedirs(sourcegit + '/tmp', exist_ok=True)
265 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
266 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
267
268 while True:
269 line = findtargets.stdout.readline()
270 if not line:
271 break
272 ta = line.decode().strip().split(' ')
273 targets.add(ta[0])
274
275 subprocess.call(["rm", "-rf", sourcegit])
276
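# dump-target-info.pl is expected to emit one line per target, with the
# "<target>/<subtarget>" pair as the first whitespace-separated field, e.g.
# (illustrative output):
#   foo/generic arch1
#   bar/64 arch2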
277 populateTargets()
278
279 # the 'change_source' setting tells the buildmaster how it should find out
280 # about source code changes. Here we poll repo_url for changes on the configured branches.
281
282 c['change_source'] = []
283 c['change_source'].append(GitPoller(
284 repo_url,
285 workdir=work_dir+'/work.git', branches=branchNames,
286 pollAtLaunch=True, pollinterval=300))
287
288 ####### SCHEDULERS
289
290 # Configure the Schedulers, which decide how to react to incoming changes. In this
291 # case, kick off a build on every target builder whenever a watched branch changes.
292
293 class TagChoiceParameter(BaseParameter):
294 spec_attributes = ["strict", "choices"]
295 type = "list"
296 strict = True
297
298 def __init__(self, name, label=None, **kw):
299 super().__init__(name, label, **kw)
300 self._choice_list = []
301
302 @property
303 def choices(self):
304 taglist = []
305 branchvers = []
306
307 for b in branchNames:
308 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
309 if basever:
310 branchvers.append(basever[1])
311
312 alltags = subprocess.Popen(
313 ['git', 'ls-remote', '--tags', repo_url],
314 stdout = subprocess.PIPE)
315
316 while True:
317 line = alltags.stdout.readline()
318
319 if not line:
320 break
321
322 (ref, tag) = line.split()
323
324 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
325
326 # only list tags matching configured branches
327 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
328 taglist.append(tagver[1])
329
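# sort note: non-rc tags get a '-z' suffix appended to their sort key so that,
# in the descending sort below, a final release lists above its release
# candidates, e.g. (hypothetical tags) v1.2.3, then v1.2.3-rc2, then v1.2.3-rc1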
330 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
331 taglist.insert(0, '')
332
333 self._choice_list = taglist
334
335 return self._choice_list
336
337 def updateFromKwargs(self, properties, kwargs, **unused):
338 tag = self.getFromKwargs(kwargs)
339 properties[self.name] = tag
340
341 # find the commit matching the tag
342 findrev = subprocess.Popen(['git', 'rev-parse', 'tags/'+tag], stdout=subprocess.PIPE, cwd=work_dir+'/work.git')
343 findrev.wait(timeout=10)
344 line = findrev.stdout.readline()
345
346 if findrev.returncode!=0 or not line:
347 raise ValidationError("Couldn't find tag")
348
349 properties['force_revision'] = line.decode().strip()
350
351 # find the branch matching the tag
352 branch = None
353 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
354 for b in branchNames:
355 if b.endswith(branchver[1]):
356 branch = b
357
358 if not branch:
359 raise ValidationError("Couldn't find branch")
360
361 properties['force_branch'] = branch
362
363 def parse_from_arg(self, s):
364 if self.strict and s not in self._choice_list:
365 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
366 return s
367
368 c['schedulers'] = []
369 c['schedulers'].append(AnyBranchScheduler(
370 name = "all",
371 change_filter = util.ChangeFilter(branch=branchNames),
372 treeStableTimer = 15*60,
373 builderNames = list(targets)))
374
375 c['schedulers'].append(ForceScheduler(
376 name = "force",
377 buttonName = "Force builds",
378 label = "Force build details",
379 builderNames = [ "00_force_build" ],
380
381 codebases = [
382 util.CodebaseParameter(
383 "",
384 label = "Repository",
385 branch = util.FixedParameter(name = "branch", default = ""),
386 revision = util.FixedParameter(name = "revision", default = ""),
387 repository = util.FixedParameter(name = "repository", default = ""),
388 project = util.FixedParameter(name = "project", default = "")
389 )
390 ],
391
392 reason = util.StringParameter(
393 name = "reason",
394 label = "Reason",
395 default = "Trigger build",
396 required = True,
397 size = 80
398 ),
399
400 properties = [
401 util.ChoiceStringParameter(
402 name = "target",
403 label = "Build target",
404 default = "all",
405 choices = [ "all" ] + list(targets)
406 ),
407 TagChoiceParameter(
408 name = "tag",
409 label = "Build tag",
410 default = ""
411 )
412 ]
413 ))
414
415 ####### BUILDERS
416
417 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
418 # what steps, and which workers can execute them. Note that any particular build will
419 # only take place on one worker.
420
421 def IsNoMasterBuild(step):
422 return step.getProperty("branch") != "master"
423
424 def IsUsignEnabled(step):
425 branch = step.getProperty("branch")
426 return branch and branches[branch].get("usign_key")
427
428 def IsSignEnabled(step):
429 branch = step.getProperty("branch")
430 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
431
432 def IsKmodArchiveEnabled(step):
433 branch = step.getProperty("branch")
434 return branch and branches[branch].get("kmod_archive")
435
436 def GetBaseVersion(branch):
437 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
438 return branch.split('-')[1]
439 else:
440 return "master"
441
442 @properties.renderer
443 def GetVersionPrefix(props):
444 branch = props.getProperty("branch")
445 basever = GetBaseVersion(branch)
446 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
447 return "%s/" % props["tag"][1:]
448 elif basever != "master":
449 return "%s-SNAPSHOT/" % basever
450 else:
451 return ""
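# Illustrative results (branch and tag names are hypothetical): a forced build
# of tag "v1.2.3" yields "1.2.3/", a branch "foo-1.2" without a tag yields
# "1.2-SNAPSHOT/", and "master" yields "" so files land directly under targets/.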
452
453 @util.renderer
454 def GetConfigSeed(props):
455 branch = props.getProperty("branch")
456 return branch and branches[branch].get("config_seed") or ""
457
458 @util.renderer
459 def GetRsyncParams(props, srcorbin, urlorkey):
460 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
461 branch = props.getProperty("branch")
462 opt = srcorbin + "_" + urlorkey
463 return branch and branches[branch].get(opt)
464
465 @util.renderer
466 def GetUsignKey(props):
467 branch = props.getProperty("branch")
468 return branch and branches[branch].get("usign_key")
469
470 def GetNextBuild(builder, requests):
471 for r in requests:
472 if r.properties:
473 # tagged builds are scheduled first
474 if r.properties.hasProperty("tag"):
475 return r
476 # then prefer requests for configured branches
477 pbranch = r.properties.getProperty("branch")
478 for name in branchNames:
479 if pbranch == name:
480 return r
481
482 r = requests[0]
483 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
484 return r
485
486 def MakeEnv(overrides=None, tryccache=False):
487 env = {
488 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
489 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
490 }
491 if tryccache:
492 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
493 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
494 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
495 else:
496 env['CC'] = env['CCC']
497 env['CXX'] = env['CCXX']
498 env['CCACHE'] = ''
499 if overrides is not None:
500 env.update(overrides)
501 return env
502
503 @properties.renderer
504 def NetLockDl(props, extralock=None):
505 lock = None
506 if props.hasProperty("dl_lock"):
507 lock = NetLocks[props["dl_lock"]]
508 if lock is not None:
509 return [lock.access('exclusive')]
510 else:
511 return []
512
513 @properties.renderer
514 def NetLockUl(props):
515 lock = None
516 if props.hasProperty("ul_lock"):
517 lock = NetLocks[props["ul_lock"]]
518 if lock is not None:
519 return [lock.access('exclusive')]
520 else:
521 return []
522
523 def IsTargetSelected(target):
524 def CheckTargetProperty(step):
525 selected_target = step.getProperty("target", "all")
526 if selected_target != "all" and selected_target != target:
527 return False
528 return True
529
530 return CheckTargetProperty
531
532 @util.renderer
533 def UsignSec2Pub(props):
534 branch = props.getProperty("branch")
535 try:
536 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
537 seckey = branches[branch].get("usign_key")
538 seckey = base64.b64decode(seckey)
539 except Exception:
540 return None
541
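# a usign/signify secret key is assumed to be laid out as: 2-byte algorithm,
# 2-byte kdf, 4-byte kdf rounds, 16-byte salt, 8-byte checksum, 8-byte key id,
# 64-byte Ed25519 secret key whose last 32 bytes are the public key; the
# slices below therefore rebuild "algorithm + key id + public key".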
542 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
543 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
544
545
546 c['builders'] = []
547
548 workerNames = [ ]
549
550 for worker in c['workers']:
551 workerNames.append(worker.workername)
552
553 force_factory = BuildFactory()
554
555 c['builders'].append(BuilderConfig(
556 name = "00_force_build",
557 workernames = workerNames,
558 factory = force_factory))
559
560 for target in targets:
561 ts = target.split('/')
562
563 factory = BuildFactory()
564
565 # setup shared work directory if required
566 factory.addStep(ShellCommand(
567 name = "sharedwd",
568 descriptionDone = "Shared work directory set up",
569 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
570 workdir = ".",
571 haltOnFailure = True,
572 ))
573
574 # find number of cores
575 factory.addStep(SetPropertyFromCommand(
576 name = "nproc",
577 property = "nproc",
578 description = "Finding number of CPUs",
579 command = ["nproc"],
580 ))
581
582 # find gcc and g++ compilers
583 factory.addStep(FileDownload(
584 name = "dlfindbinpl",
585 mastersrc = scripts_dir + '/findbin.pl',
586 workerdest = "../findbin.pl",
587 mode = 0o755,
588 ))
589
590 factory.addStep(SetPropertyFromCommand(
591 name = "gcc",
592 property = "cc_command",
593 description = "Finding gcc command",
594 command = ["../findbin.pl", "gcc", "", ""],
595 haltOnFailure = True,
596 ))
597
598 factory.addStep(SetPropertyFromCommand(
599 name = "g++",
600 property = "cxx_command",
601 description = "Finding g++ command",
602 command = ["../findbin.pl", "g++", "", ""],
603 haltOnFailure = True,
604 ))
605
606 # see if ccache is available
607 factory.addStep(SetPropertyFromCommand(
608 name = "ccache",
609 property = "ccache_command",
610 description = "Testing for ccache command",
611 command = ["which", "ccache"],
612 haltOnFailure = False,
613 flunkOnFailure = False,
614 warnOnFailure = False,
615 hideStepIf = lambda r, s: r==results.FAILURE,
616 ))
617
618 # check out the source
619 # Git() runs:
620 # if repo doesn't exist: 'git clone repourl'
621 # method 'clean' runs 'git clean -d -f'; method 'fresh' runs 'git clean -d -f -f -x' (both only with mode='full')
622 # git cat-file -e <commit>
623 # git checkout -f <commit>
624 # git checkout -B <branch>
625 # git rev-parse HEAD
626 factory.addStep(Git(
627 name = "git",
628 repourl = repo_url,
629 mode = 'full',
630 method = 'fresh',
631 locks = NetLockDl,
632 haltOnFailure = True,
633 ))
634
635 # update remote refs
636 factory.addStep(ShellCommand(
637 name = "fetchrefs",
638 description = "Fetching Git remote refs",
639 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
640 haltOnFailure = True,
641 ))
642
643 # Verify that Git HEAD points to a tag or branch
644 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
645 factory.addStep(ShellCommand(
646 name = "gitverify",
647 description = "Ensure that Git HEAD is pointing to a branch or tag",
648 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
649 haltOnFailure = True,
650 ))
651
652 factory.addStep(ShellCommand(
653 name = "rmtmp",
654 description = "Remove tmp folder",
655 command=["rm", "-rf", "tmp/"],
656 ))
657
658 # feed
659 factory.addStep(ShellCommand(
660 name = "rmfeedlinks",
661 description = "Remove feed symlinks",
662 command=["rm", "-rf", "package/feeds/"],
663 ))
664
665 factory.addStep(StringDownload(
666 name = "ccachecc",
667 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
668 workerdest = "../ccache_cc.sh",
669 mode = 0o755,
670 ))
671
672 factory.addStep(StringDownload(
673 name = "ccachecxx",
674 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
675 workerdest = "../ccache_cxx.sh",
676 mode = 0o755,
677 ))
678
679 # feed
680 factory.addStep(ShellCommand(
681 name = "updatefeeds",
682 description = "Updating feeds",
683 command=["./scripts/feeds", "update"],
684 env = MakeEnv(tryccache=True),
685 haltOnFailure = True,
686 locks = NetLockDl,
687 ))
688
689 # feed
690 factory.addStep(ShellCommand(
691 name = "installfeeds",
692 description = "Installing feeds",
693 command=["./scripts/feeds", "install", "-a"],
694 env = MakeEnv(tryccache=True),
695 haltOnFailure = True,
696 ))
697
698 # seed config
699 factory.addStep(StringDownload(
700 name = "dlconfigseed",
701 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
702 workerdest = ".config",
703 mode = 0o644,
704 ))
705
706 # configure
707 factory.addStep(ShellCommand(
708 name = "newconfig",
709 descriptionDone = ".config seeded",
710 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
711 ))
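# for a hypothetical target "foo/bar" with usign enabled, this appends:
#   CONFIG_TARGET_foo=y
#   CONFIG_TARGET_foo_bar=y
#   CONFIG_SIGNED_PACKAGES=y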
712
713 factory.addStep(ShellCommand(
714 name = "delbin",
715 description = "Removing output directory",
716 command = ["rm", "-rf", "bin/"],
717 ))
718
719 factory.addStep(ShellCommand(
720 name = "defconfig",
721 description = "Populating .config",
722 command = ["make", "defconfig"],
723 env = MakeEnv(),
724 ))
725
726 # check architecture - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_<target>_<subtarget>
727 factory.addStep(ShellCommand(
728 name = "checkarch",
729 description = "Checking architecture",
730 descriptionDone = "Architecture validated",
731 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
732 logEnviron = False,
733 want_stdout = False,
734 want_stderr = False,
735 haltOnFailure = True,
736 flunkOnFailure = False, # this is not a build FAILURE
737 ))
738
739 # find libc suffix
740 factory.addStep(SetPropertyFromCommand(
741 name = "libc",
742 property = "libc",
743 description = "Finding libc suffix",
744 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
745 ))
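# the sed above turns CONFIG_LIBC="musl" into an empty suffix and any other
# libc into "-<name>", e.g. CONFIG_LIBC="glibc" gives a libc property of "-glibc"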
746
747 # install build key
748 factory.addStep(StringDownload(
749 name = "dlkeybuildpub",
750 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
751 workerdest = "key-build.pub",
752 mode = 0o600,
753 doStepIf = IsUsignEnabled,
754 ))
755
756 factory.addStep(StringDownload(
757 name = "dlkeybuild",
758 s = "# fake private key",
759 workerdest = "key-build",
760 mode = 0o600,
761 doStepIf = IsUsignEnabled,
762 ))
763
764 factory.addStep(StringDownload(
765 name = "dlkeybuilducert",
766 s = "# fake certificate",
767 workerdest = "key-build.ucert",
768 mode = 0o600,
769 doStepIf = IsUsignEnabled,
770 ))
771
772 # prepare dl
773 factory.addStep(ShellCommand(
774 name = "dldir",
775 description = "Preparing dl/",
776 descriptionDone = "dl/ prepared",
777 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
778 workdir = Property("builddir"),
779 logEnviron = False,
780 want_stdout = False,
781 ))
782
783 # cleanup dl
784 factory.addStep(ShellCommand(
785 name = "dlprune",
786 description = "Pruning dl/",
787 descriptionDone = "dl/ pruned",
788 command = 'find dl/ -atime +15 -delete -print',
789 logEnviron = False,
790 ))
791
792 # prepare tar
793 factory.addStep(ShellCommand(
794 name = "dltar",
795 description = "Building and installing GNU tar",
796 descriptionDone = "GNU tar built and installed",
797 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
798 env = MakeEnv(tryccache=True),
799 haltOnFailure = True,
800 ))
801
802 # populate dl
803 factory.addStep(ShellCommand(
804 name = "dlrun",
805 description = "Populating dl/",
806 descriptionDone = "dl/ populated",
807 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
808 env = MakeEnv(),
809 logEnviron = False,
810 locks = NetLockDl,
811 ))
812
813 factory.addStep(ShellCommand(
814 name = "cleanbase",
815 description = "Cleaning base-files",
816 command=["make", "package/base-files/clean", "V=s"],
817 ))
818
819 # build
820 factory.addStep(ShellCommand(
821 name = "tools",
822 description = "Building and installing tools",
823 descriptionDone = "Tools built and installed",
824 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
825 env = MakeEnv(tryccache=True),
826 haltOnFailure = True,
827 ))
828
829 factory.addStep(ShellCommand(
830 name = "toolchain",
831 description = "Building and installing toolchain",
832 descriptionDone = "Toolchain built and installed",
833 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
834 env = MakeEnv(),
835 haltOnFailure = True,
836 ))
837
838 factory.addStep(ShellCommand(
839 name = "kmods",
840 description = "Building kmods",
841 descriptionDone = "Kmods built",
842 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
843 env = MakeEnv(),
844 haltOnFailure = True,
845 ))
846
847 # find kernel version
848 factory.addStep(SetPropertyFromCommand(
849 name = "kernelversion",
850 property = "kernelversion",
851 description = "Finding the effective Kernel version",
852 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
853 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
854 ))
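# the resulting property has the form "<version>-<release>-<vermagic>",
# e.g. a hypothetical "5.15.150-1-0123456789abcdef"; it is used below to
# name the per-kernel kmods/ directory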
855
856 factory.addStep(ShellCommand(
857 name = "pkgclean",
858 description = "Cleaning up package build",
859 descriptionDone = "Package build cleaned up",
860 command=["make", "package/cleanup", "V=s"],
861 ))
862
863 factory.addStep(ShellCommand(
864 name = "pkgbuild",
865 description = "Building packages",
866 descriptionDone = "Packages built",
867 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
868 env = MakeEnv(),
869 haltOnFailure = True,
870 ))
871
872 factory.addStep(ShellCommand(
873 name = "pkginstall",
874 description = "Installing packages",
875 descriptionDone = "Packages installed",
876 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
877 env = MakeEnv(),
878 haltOnFailure = True,
879 ))
880
881 factory.addStep(ShellCommand(
882 name = "pkgindex",
883 description = "Indexing packages",
884 descriptionDone = "Packages indexed",
885 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
886 env = MakeEnv(),
887 haltOnFailure = True,
888 ))
889
890 factory.addStep(ShellCommand(
891 name = "images",
892 description = "Building and installing images",
893 descriptionDone = "Images built and installed",
894 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
895 env = MakeEnv(),
896 haltOnFailure = True,
897 ))
898
899 factory.addStep(ShellCommand(
900 name = "buildinfo",
901 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
902 command = "make -j1 buildinfo V=s || true",
903 env = MakeEnv(),
904 haltOnFailure = True,
905 ))
906
907 factory.addStep(ShellCommand(
908 name = "json_overview_image_info",
909 description = "Generating profiles.json in target folder",
910 command = "make -j1 json_overview_image_info V=s || true",
911 env = MakeEnv(),
912 haltOnFailure = True,
913 ))
914
915 factory.addStep(ShellCommand(
916 name = "checksums",
917 description = "Calculating checksums",
918 descriptionDone = "Checksums calculated",
919 command=["make", "-j1", "checksum", "V=s"],
920 env = MakeEnv(),
921 haltOnFailure = True,
922 ))
923
924 factory.addStep(ShellCommand(
925 name = "kmoddir",
926 descriptionDone = "Kmod directory created",
927 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
928 haltOnFailure = True,
929 doStepIf = IsKmodArchiveEnabled,
930 ))
931
932 factory.addStep(ShellCommand(
933 name = "kmodprepare",
934 description = "Preparing kmod archive",
935 descriptionDone = "Kmod archive prepared",
936 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
937 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
938 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
939 haltOnFailure = True,
940 doStepIf = IsKmodArchiveEnabled,
941 ))
942
943 factory.addStep(ShellCommand(
944 name = "kmodindex",
945 description = "Indexing kmod archive",
946 descriptionDone = "Kmod archive indexed",
947 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
948 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
949 env = MakeEnv(),
950 haltOnFailure = True,
951 doStepIf = IsKmodArchiveEnabled,
952 ))
953
954 # sign
955 factory.addStep(MasterShellCommand(
956 name = "signprepare",
957 descriptionDone = "Temporary signing directory prepared",
958 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
959 haltOnFailure = True,
960 doStepIf = IsSignEnabled,
961
962 ))
963
964 factory.addStep(ShellCommand(
965 name = "signpack",
966 description = "Packing files to sign",
967 descriptionDone = "Files to sign packed",
968 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
969 haltOnFailure = True,
970 doStepIf = IsSignEnabled,
971 ))
972
973 factory.addStep(FileUpload(
974 workersrc = "sign.tar.gz",
975 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
976 haltOnFailure = True,
977 doStepIf = IsSignEnabled,
978 ))
979
980 factory.addStep(MasterShellCommand(
981 name = "signfiles",
982 description = "Signing files",
983 descriptionDone = "Files signed",
984 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
985 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
986 haltOnFailure = True,
987 doStepIf = IsSignEnabled,
988 ))
989
990 factory.addStep(FileDownload(
991 name = "dlsigntargz",
992 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
993 workerdest = "sign.tar.gz",
994 haltOnFailure = True,
995 doStepIf = IsSignEnabled,
996 ))
997
998 factory.addStep(ShellCommand(
999 name = "signunpack",
1000 description = "Unpacking signed files",
1001 descriptionDone = "Signed files unpacked",
1002 command = ["tar", "-xzf", "sign.tar.gz"],
1003 haltOnFailure = True,
1004 doStepIf = IsSignEnabled,
1005 ))
1006
1007 # upload
1008 factory.addStep(ShellCommand(
1009 name = "dirprepare",
1010 descriptionDone = "Upload directory structure prepared",
1011 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1012 haltOnFailure = True,
1013 ))
1014
1015 factory.addStep(ShellCommand(
1016 name = "linkprepare",
1017 descriptionDone = "Repository symlink prepared",
1018 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1019 doStepIf = IsNoMasterBuild,
1020 haltOnFailure = True,
1021 ))
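# on a hypothetical branch "foo-1.2" this creates the symlink
#   tmp/upload/1.2-SNAPSHOT/packages -> ../packages-1.2
# pointing the version-prefixed tree at the shared per-branch package directory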
1022
1023 factory.addStep(ShellCommand(
1024 name = "kmoddirprepare",
1025 descriptionDone = "Kmod archive upload directory prepared",
1026 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1027 haltOnFailure = True,
1028 doStepIf = IsKmodArchiveEnabled,
1029 ))
1030
1031 factory.addStep(ShellCommand(
1032 name = "dirupload",
1033 description = "Uploading directory structure",
1034 descriptionDone = "Directory structure uploaded",
1035 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1036 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1037 haltOnFailure = True,
1038 logEnviron = False,
1039 locks = NetLockUl,
1040 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1041 ))
1042
1043 # download remote sha256sums to 'target-sha256sums'
1044 factory.addStep(ShellCommand(
1045 name = "target-sha256sums",
1046 description = "Fetching remote sha256sums for target",
1047 descriptionDone = "Remote sha256sums for target fetched",
1048 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1049 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1050 logEnviron = False,
1051 haltOnFailure = False,
1052 flunkOnFailure = False,
1053 warnOnFailure = False,
1054 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1055 ))
1056
1057 # build list of files to upload
1058 factory.addStep(FileDownload(
1059 name = "dlsha2rsyncpl",
1060 mastersrc = scripts_dir + '/sha2rsync.pl',
1061 workerdest = "../sha2rsync.pl",
1062 mode = 0o755,
1063 ))
1064
1065 factory.addStep(ShellCommand(
1066 name = "buildlist",
1067 description = "Building list of files to upload",
1068 descriptionDone = "List of files to upload built",
1069 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1070 haltOnFailure = True,
1071 ))
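# sha2rsync.pl is assumed to compare the remote and local sha256sums lists and
# write only new or changed file names to "rsynclist", which the upload steps
# below feed to rsync via --files-from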
1072
1073 factory.addStep(FileDownload(
1074 name = "dlrsync.sh",
1075 mastersrc = scripts_dir + '/rsync.sh',
1076 workerdest = "../rsync.sh",
1077 mode = 0o755,
1078 ))
1079
1080 # upload new files and update existing ones
1081 factory.addStep(ShellCommand(
1082 name = "targetupload",
1083 description = "Uploading target files",
1084 descriptionDone = "Target files uploaded",
1085 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1086 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1087 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1088 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1089 haltOnFailure = True,
1090 logEnviron = False,
1091 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1092 ))
1093
1094 # delete files which don't exist locally
1095 factory.addStep(ShellCommand(
1096 name = "targetprune",
1097 description = "Pruning target files",
1098 descriptionDone = "Target files pruned",
1099 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1100 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1101 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1102 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1103 haltOnFailure = True,
1104 logEnviron = False,
1105 locks = NetLockUl,
1106 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1107 ))
1108
1109 factory.addStep(ShellCommand(
1110 name = "kmodupload",
1111 description = "Uploading kmod archive",
1112 descriptionDone = "Kmod archive uploaded",
1113 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1114 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1115 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1116 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1117 haltOnFailure = True,
1118 logEnviron = False,
1119 locks = NetLockUl,
1120 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
1121 ))
1122
1123 factory.addStep(ShellCommand(
1124 name = "sourcelist",
1125 description = "Finding source archives to upload",
1126 descriptionDone = "Source archives to upload found",
1127 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1128 haltOnFailure = True,
1129 ))
1130
1131 factory.addStep(ShellCommand(
1132 name = "sourceupload",
1133 description = "Uploading source archives",
1134 descriptionDone = "Source archives uploaded",
1135 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1136 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1137 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1138 haltOnFailure = True,
1139 logEnviron = False,
1140 locks = NetLockUl,
1141 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1142 ))
1143
1144 factory.addStep(ShellCommand(
1145 name = "df",
1146 description = "Reporting disk usage",
1147 command=["df", "-h", "."],
1148 env={'LC_ALL': 'C'},
1149 logEnviron = False,
1150 haltOnFailure = False,
1151 flunkOnFailure = False,
1152 warnOnFailure = False,
1153 alwaysRun = True,
1154 ))
1155
1156 factory.addStep(ShellCommand(
1157 name = "du",
1158 description = "Reporting estimated file space usage",
1159 command=["du", "-sh", "."],
1160 env={'LC_ALL': 'C'},
1161 logEnviron = False,
1162 haltOnFailure = False,
1163 flunkOnFailure = False,
1164 warnOnFailure = False,
1165 alwaysRun = True,
1166 ))
1167
1168 factory.addStep(ShellCommand(
1169 name = "ccachestat",
1170 description = "Reporting ccache stats",
1171 command=["ccache", "-s"],
1172 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1173 logEnviron = False,
1174 want_stderr = False,
1175 haltOnFailure = False,
1176 flunkOnFailure = False,
1177 warnOnFailure = False,
1178 hideStepIf = lambda r, s: r==results.FAILURE,
1179 ))
1180
1181 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1182
1183 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1184 force_factory.addStep(steps.Trigger(
1185 name = "trigger_%s" % target,
1186 description = "Triggering %s build" % target,
1187 schedulerNames = [ "trigger_%s" % target ],
1188 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
1189 set_properties = { "reason": Property("reason"), "tag": Property("tag"), },
1190 doStepIf = IsTargetSelected(target),
1191 ))
1192
1193
1194 ####### STATUS TARGETS
1195
1196 # Build results are reported through the web UI ('www') and through the
1197 # reporters configured in 'services' below (currently an optional IRC bot),
1198 # depending on which settings are present in the ini file.
1199
1200 if "status_bind" in inip1:
1201 c['www'] = {
1202 'port': inip1.get("status_bind"),
1203 'plugins': {
1204 'waterfall_view': True,
1205 'console_view': True,
1206 'grid_view': True
1207 }
1208 }
1209
1210 if "status_user" in inip1 and "status_password" in inip1:
1211 c['www']['auth'] = util.UserPasswordAuth([
1212 (inip1.get("status_user"), inip1.get("status_password"))
1213 ])
1214 c['www']['authz'] = util.Authz(
1215 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1216 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1217 )
1218
1219 c['services'] = []
1220 if ini.has_section("irc"):
1221 iniirc = ini['irc']
1222 irc_host = iniirc.get("host", None)
1223 irc_port = iniirc.getint("port", 6667)
1224 irc_chan = iniirc.get("channel", None)
1225 irc_nick = iniirc.get("nickname", None)
1226 irc_pass = iniirc.get("password", None)
1227
1228 if irc_host and irc_nick and irc_chan:
1229 irc = reporters.IRC(irc_host, irc_nick,
1230 port = irc_port,
1231 password = irc_pass,
1232 channels = [ irc_chan ],
1233 notify_events = [ 'exception', 'problem', 'recovery' ]
1234 )
1235
1236 c['services'].append(irc)
1237
1238 c['revlink'] = util.RevlinkMatch([
1239 r'https://git.openwrt.org/openwrt/(.*).git'
1240 ],
1241 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1242
1243 ####### DB URL
1244
1245 c['db'] = {
1246 # This specifies what database buildbot uses to store its state. You can leave
1247 # this at its default for all but the largest installations.
1248 'db_url' : "sqlite:///state.sqlite",
1249 }
1250
1251 c['buildbotNetUsageData'] = None