phase1: switch to separate builders per branch per target
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41 from buildbot.worker.local import LocalWorker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is the phase1 buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("missing required [general] or [phase1] section in config.ini")
56
57 inip1 = ini['phase1']
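# For reference, a minimal sketch of the sections and keys this file reads
# from config.ini; every value below is a placeholder, not a real setting.
# The [branch ...] and [worker ...] sections are illustrated where they are
# parsed further down.
#
#   [general]
#   workdir = /data/buildbot
#   title = OpenWrt Project
#   title_url = https://openwrt.org/
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   port = 9989
#   status_bind = tcp:8010:interface=127.0.0.1
#   status_user = admin
#   status_password = secret
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git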
58
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini['repo'].get("url")
64
65 rsync_defopts = ["-v", "-4", "--timeout=120"]
66
67 #if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
68 # rsync_bin_defopts += ["--contimeout=20"]
69
70 branches = {}
71
72 def ini_parse_branch(section):
73 b = {}
74 name = section.get("name")
75
76 if not name:
77 raise ValueError("missing 'name' in " + repr(section))
78 if name in branches:
79 raise ValueError("duplicate branch name in " + repr(section))
80
81 b["name"] = name
82 b["bin_url"] = section.get("binary_url")
83 b["bin_key"] = section.get("binary_password")
84
85 b["src_url"] = section.get("source_url")
86 b["src_key"] = section.get("source_password")
87
88 b["gpg_key"] = section.get("gpg_key")
89
90 b["usign_key"] = section.get("usign_key")
91 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
92 b["usign_comment"] = section.get("usign_comment", usign_comment)
93
94 b["config_seed"] = section.get("config_seed")
95
96 b["kmod_archive"] = section.getboolean("kmod_archive", False)
97
98 branches[name] = b
99 log.msg("Configured branch: {}".format(name))
100
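# A hypothetical [branch ...] section matching the keys parsed above; the
# branch name, URLs and secrets are placeholders. Only "name" is required,
# the other keys are optional and kmod_archive defaults to false:
#
#   [branch main]
#   name = main
#   binary_url = rsync://upload@mirror.example.org/bin
#   binary_password = secret
#   source_url = rsync://upload@mirror.example.org/src
#   source_password = secret
#   usign_key = <base64-encoded usign secret key>
#   usign_comment = untrusted comment: Main key
#   gpg_key = <ascii-armored GPG key>
#   config_seed = CONFIG_BUILDBOT=y
#   kmod_archive = yes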
101 # PB port can be either a numeric port or a connection string
102 pb_port = inip1.get("port") or 9989
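# "port" can also be given as a Twisted strports connection string in
# config.ini, e.g. a hypothetical "tcp:9989:interface=0.0.0.0" to pick the
# listening interface explicitly.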
103
104 # This is the dictionary that the buildmaster pays attention to. We also use
105 # a shorter alias to save typing.
106 c = BuildmasterConfig = {}
107
108 ####### PROJECT IDENTITY
109
110 # the 'title' string will appear at the top of this buildbot
111 # installation's web UI home page (linked to the
112 # 'titleURL') and is embedded in the page title of the web interface.
113
114 c['title'] = ini['general'].get("title")
115 c['titleURL'] = ini['general'].get("title_url")
116
117 # the 'buildbotURL' string should point to the location where the buildbot's
118 # internal web server (the www UI) is visible. This
119 # typically uses the port number set in 'status_bind' below, but
120 # with an externally-visible host name which the buildbot cannot figure out
121 # without some help.
122
123 c['buildbotURL'] = inip1.get("buildbot_url")
124
125 ####### BUILDWORKERS
126
127 # The 'workers' list defines the set of recognized buildworkers. Each element is
128 # a Worker object, specifying a unique worker name and password. The same
129 # worker name and password must be configured on the worker.
130
131 c['workers'] = []
132 NetLocks = dict()
133
134 for section in ini.sections():
135 if section.startswith("branch "):
136 ini_parse_branch(ini[section])
137
138 if section.startswith("worker "):
139 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
140 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
141 sl_props = { 'dl_lock':None, 'ul_lock':None }
142 name = ini.get(section, "name")
143 password = ini.get(section, "password")
144 if ini.has_option(section, "dl_lock"):
145 lockname = ini.get(section, "dl_lock")
146 sl_props['dl_lock'] = lockname
147 if lockname not in NetLocks:
148 NetLocks[lockname] = locks.MasterLock(lockname)
149 if ini.has_option(section, "ul_lock"):
150 lockname = ini.get(section, "ul_lock")
151 sl_props['ul_lock'] = lockname
152 if lockname not in NetLocks:
153 NetLocks[lockname] = locks.MasterLock(lockname)
154 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
155
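# A hypothetical [worker ...] section matching the keys parsed above; name
# and password are placeholders. "phase" selects which master uses the
# worker (absent or 1 means this one), and dl_lock/ul_lock name shared
# master locks so workers behind the same link never download or upload
# concurrently:
#
#   [worker 1]
#   name = builder-01
#   password = secret
#   phase = 1
#   dl_lock = dsl-line-1
#   ul_lock = dsl-line-1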
156 c['protocols'] = {'pb': {'port': pb_port}}
157
158 # coalesce builds
159 c['collapseRequests'] = True
160
161 # Reduce amount of backlog data
162 c['configurators'] = [util.JanitorConfigurator(
163 logHorizon=timedelta(days=3),
164 hour=6,
165 )]
166
167 @defer.inlineCallbacks
168 def getNewestCompleteTime(bldr):
169 """Returns the complete_at of the latest completed and not SKIPPED
170 build request for this builder, or None if there are no such build
171 requests. We need to filter out SKIPPED requests because we're
172 using collapseRequests=True, which unfortunately marks all
173 previous requests as complete when a new buildset is created.
174
175 @returns: datetime instance or None, via Deferred
176 """
177
178 bldrid = yield bldr.getBuilderId()
179 completed = yield bldr.master.data.get(
180 ('builders', bldrid, 'buildrequests'),
181 [
182 resultspec.Filter('complete', 'eq', [True]),
183 resultspec.Filter('results', 'ne', [results.SKIPPED]),
184 ],
185 order=['-complete_at'], limit=1)
186 if not completed:
187 return
188
189 complete_at = completed[0]['complete_at']
190
191 last_build = yield bldr.master.data.get(
192 ('builds', ),
193 [
194 resultspec.Filter('builderid', 'eq', [bldrid]),
195 ],
196 order=['-started_at'], limit=1)
197
198 if last_build and last_build[0]:
199 last_complete_at = last_build[0]['complete_at']
200 if last_complete_at and (last_complete_at > complete_at):
201 return last_complete_at
202
203 return complete_at
204
205 @defer.inlineCallbacks
206 def prioritizeBuilders(master, builders):
207 """Returns sorted list of builders by their last timestamp of completed and
208 not skipped build.
209
210 @returns: list of sorted builders
211 """
212
213 def is_building(bldr):
214 return bool(bldr.building) or bool(bldr.old_building)
215
216 def bldr_info(bldr):
217 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
218 d.addCallback(lambda complete_at: (complete_at, bldr))
219 return d
220
221 def bldr_sort(item):
222 (complete_at, bldr) = item
223
224 if bldr.name == "00_force_build":
225 date = datetime.min
226 complete_at = date.replace(tzinfo=tzutc())
227 return (complete_at, bldr.name)
228
229 if not complete_at:
230 date = datetime.min
231 complete_at = date.replace(tzinfo=tzutc())
232
233 if is_building(bldr):
234 date = datetime.max
235 complete_at = date.replace(tzinfo=tzutc())
236
237 return (complete_at, bldr.name)
238
239 infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
240 infos.sort(key=bldr_sort)
241
242 for r in infos:
243 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
244
245 return [r[1] for r in infos]
246
247 c['prioritizeBuilders'] = prioritizeBuilders
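# A worked example with hypothetical builders: if builder A last completed
# two days ago, builder B has never completed a build (complete_at None ->
# datetime.min) and builder C is currently building (-> datetime.max), the
# ascending sort returns [00_force_build, B, A, C]: forced builds and
# never-built builders come first (ties broken by name), busy builders last.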
248
249 ####### CHANGESOURCES
250
251 branchNames = [branches[b]["name"] for b in branches]
252
253 # find targets
254 targets = set()
255
256 def populateTargets():
257 log.msg("Populating targets, this will take time")
258 sourcegit = work_dir + '/source.git'
259 for branch in branchNames:
260 if os.path.isdir(sourcegit):
261 subprocess.call(["rm", "-rf", sourcegit])
262
263 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
264
265 os.makedirs(sourcegit + '/tmp', exist_ok=True)
266 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
267 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
268
269 while True:
270 line = findtargets.stdout.readline()
271 if not line:
272 break
273 ta = line.decode().strip().split(' ')
274 targets.add(ta[0])
275
276 subprocess.call(["rm", "-rf", sourcegit])
277
278 populateTargets()
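# dump-target-info.pl is expected to print one line per target, beginning
# with the "target/subtarget" identifier; only that first field is kept
# here. Purely illustrative lines might look like:
#   ath79/generic ...
#   malta/be ...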
279
280 # the 'change_source' setting tells the buildmaster how it should find out
281 # about source code changes. Here we poll the configured repository on all branches.
282
283 c['change_source'] = []
284 c['change_source'].append(GitPoller(
285 repo_url,
286 workdir=work_dir+'/work.git', branches=branchNames,
287 pollAtLaunch=True, pollinterval=300))
288
289 ####### SCHEDULERS
290
291 # Configure the Schedulers, which decide how to react to incoming changes. In this
292 # case, trigger builds on the matching per-branch, per-target builders
293
294 class TagChoiceParameter(BaseParameter):
295 spec_attributes = ["strict", "choices"]
296 type = "list"
297 strict = True
298
299 def __init__(self, name, label=None, **kw):
300 super().__init__(name, label, **kw)
301 self._choice_list = []
302
303 def getRevTags(self, findtag=None):
304 taglist = []
305 branchvers = []
306
307 for b in branchNames:
308 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
309 if basever:
310 branchvers.append(basever[1])
311
312 alltags = subprocess.Popen(
313 ['git', 'ls-remote', '--tags', repo_url],
314 stdout = subprocess.PIPE)
315
316 while True:
317 line = alltags.stdout.readline()
318
319 if not line:
320 break
321
322 (rev, tag) = line.split()
323
324 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
325
326 # only list tags matching configured branches
327 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
328 if findtag and findtag != tagver[1]:
329 continue
330 taglist.append({'rev': rev.decode().strip(), 'tag': tagver[1]})
331
332 return taglist
333
334 @property
335 def choices(self):
336 taglist = [rt['tag'] for rt in self.getRevTags()]
337 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
338 taglist.insert(0, '')
339
340 self._choice_list = taglist
341
342 return self._choice_list
343
344 def updateFromKwargs(self, properties, kwargs, **unused):
345 tag = self.getFromKwargs(kwargs)
346 properties[self.name] = tag
347
348 # find the commit matching the tag
349 findtag = self.getRevTags(tag)
350
351 if not findtag:
352 raise ValidationError("Couldn't find tag")
353
354 properties['force_revision'] = findtag[0]['rev']
355
356 # find the branch matching the tag
357 branch = None
358 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
359 for b in branchNames:
360 if b.endswith(branchver[1]):
361 branch = b
362
363 if not branch:
364 raise ValidationError("Couldn't find branch")
365
366 properties['force_branch'] = branch
367
368 def parse_from_arg(self, s):
369 if self.strict and s not in self._choice_list:
370 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
371 return s
372
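# getRevTags() parses `git ls-remote --tags <repo>` output of the form
#   <sha1>	refs/tags/v23.05.2
# (peeled "^{}" entries do not match the anchored regex) into entries like
#   {'rev': '<sha1>', 'tag': 'v23.05.2'}
# keeping only tags whose version matches a configured release branch. The
# sort key in choices() appends '-z' to final releases so that, with
# reverse=True, v23.05.2 is listed before v23.05.2-rc1. The tag values here
# are illustrative only.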
373 @util.renderer
374 @defer.inlineCallbacks
375 def builderNames(props):
376 """ since we have per branch and per target builders,
377 address the relevant builder for each new buildrequest
378 based on the request's desired branch and target.
379 """
380 branch = props.getProperty("branch")
381 target = props.getProperty("target", "")
382
383 if target == "all":
384 target = ""
385
386 # if there is no branch property, fall back to the sourcestamp's branch
387 if not branch:
388 # match builders with target branch
389 ss = props.sourcestamps[0]
390 if ss:
391 branch = ss['branch']
392 else:
393 log.msg("couldn't find builder")
394 return [] # nothing works
395
396 bname = branch + "_" + target
397 builders = []
398
399 for b in (yield props.master.data.get(('builders',))):
400 if not b['name'].startswith(bname):
401 continue
402 builders.append(b['name'])
403
404 return builders
405
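# Example dispatch (hypothetical names): a change on branch "main" with no
# target property gives bname "main_" and therefore selects every
# "main_<target>/<subtarget>" builder, while a forced build with branch
# "openwrt-23.05" and target "ath79/generic" selects only
# "openwrt-23.05_ath79/generic".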
406 c['schedulers'] = []
407 c['schedulers'].append(AnyBranchScheduler(
408 name = "all",
409 change_filter = util.ChangeFilter(branch=branchNames),
410 treeStableTimer = 15*60,
411 builderNames = builderNames))
412
413 c['schedulers'].append(ForceScheduler(
414 name = "force",
415 buttonName = "Force builds",
416 label = "Force build details",
417 builderNames = [ "00_force_build" ],
418
419 codebases = [
420 util.CodebaseParameter(
421 "",
422 label = "Repository",
423 branch = util.FixedParameter(name = "branch", default = ""),
424 revision = util.FixedParameter(name = "revision", default = ""),
425 repository = util.FixedParameter(name = "repository", default = ""),
426 project = util.FixedParameter(name = "project", default = "")
427 )
428 ],
429
430 reason = util.StringParameter(
431 name = "reason",
432 label = "Reason",
433 default = "Trigger build",
434 required = True,
435 size = 80
436 ),
437
438 properties = [
439 util.ChoiceStringParameter(
440 name = "target",
441 label = "Build target",
442 default = "all",
443 choices = [ "all" ] + list(targets)
444 ),
445 TagChoiceParameter(
446 name = "tag",
447 label = "Build tag",
448 default = ""
449 )
450 ]
451 ))
452
453 c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=builderNames))
454
455 ####### BUILDERS
456
457 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
458 # what steps, and which workers can execute them. Note that any particular build will
459 # only take place on one worker.
460
461 def IsNoMasterBuild(step):
462 return step.getProperty("branch") != "master"
463
464 def IsUsignEnabled(step):
465 branch = step.getProperty("branch")
466 return branch and branches[branch].get("usign_key")
467
468 def IsSignEnabled(step):
469 branch = step.getProperty("branch")
470 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
471
472 def IsKmodArchiveEnabled(step):
473 branch = step.getProperty("branch")
474 return branch and branches[branch].get("kmod_archive")
475
476 def GetBaseVersion(branch):
477 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
478 return branch.split('-')[1]
479 else:
480 return "master"
481
482 @properties.renderer
483 def GetVersionPrefix(props):
484 branch = props.getProperty("branch")
485 basever = GetBaseVersion(branch)
486 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
487 return "%s/" % props["tag"][1:]
488 elif basever != "master":
489 return "%s-SNAPSHOT/" % basever
490 else:
491 return ""
492
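# Worked example (hypothetical branch and tag names):
#   GetBaseVersion("openwrt-23.05") -> "23.05", GetBaseVersion("master") -> "master"
#   branch "openwrt-23.05", tag "v23.05.2" -> prefix "23.05.2/"
#   branch "openwrt-23.05", no tag         -> prefix "23.05-SNAPSHOT/"
#   branch "master"                        -> prefix ""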
493 @util.renderer
494 def GetConfigSeed(props):
495 branch = props.getProperty("branch")
496 return branch and branches[branch].get("config_seed") or ""
497
498 @util.renderer
499 def GetRsyncParams(props, srcorbin, urlorkey):
500 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
501 branch = props.getProperty("branch")
502 opt = srcorbin + "_" + urlorkey
503 return branch and branches[branch].get(opt)
504
505 @util.renderer
506 def GetUsignKey(props):
507 branch = props.getProperty("branch")
508 return branch and branches[branch].get("usign_key")
509
510 def GetNextBuild(builder, requests):
511 for r in requests:
512 if r.properties:
513 # order tagged build first
514 if r.properties.hasProperty("tag"):
515 return r
516 # then prefer requests in configured branch order
517 pbranch = r.properties.getProperty("branch")
518 for name in branchNames:
519 if pbranch == name:
520 return r
521
522 r = requests[0]
523 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
524 return r
525
526 def MakeEnv(overrides=None, tryccache=False):
527 env = {
528 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
529 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
530 }
531 if tryccache:
532 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
533 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
534 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
535 else:
536 env['CC'] = env['CCC']
537 env['CXX'] = env['CCXX']
538 env['CCACHE'] = ''
539 if overrides is not None:
540 env.update(overrides)
541 return env
542
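# For reference, MakeEnv(tryccache=True) points CC/CXX at the
# ccache_cc.sh/ccache_cxx.sh wrappers created by the ccachecc/ccachecxx
# steps below, which exec "${CCACHE} ${CCC}" / "${CCACHE} ${CCXX}"; CCC/CCXX
# hold the detected compilers (default gcc/g++) and CCACHE is empty when no
# ccache was found, so the wrappers degrade to plain compiler calls.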
543 @properties.renderer
544 def NetLockDl(props, extralock=None):
545 lock = None
546 if props.hasProperty("dl_lock"):
547 lock = NetLocks[props["dl_lock"]]
548 if lock is not None:
549 return [lock.access('exclusive')]
550 else:
551 return []
552
553 @properties.renderer
554 def NetLockUl(props):
555 lock = None
556 if props.hasProperty("ul_lock"):
557 lock = NetLocks[props["ul_lock"]]
558 if lock is not None:
559 return [lock.access('exclusive')]
560 else:
561 return []
562
563 def IsTargetSelected(target):
564 def CheckTargetProperty(step):
565 selected_target = step.getProperty("target", "all")
566 if selected_target != "all" and selected_target != target:
567 return False
568 return True
569
570 return CheckTargetProperty
571
572 @util.renderer
573 def UsignSec2Pub(props):
574 branch = props.getProperty("branch")
575 try:
576 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
577 seckey = branches[branch].get("usign_key")
578 seckey = base64.b64decode(seckey)
579 except Exception:
580 return None
581
582 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
583 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
584
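# Note on the slicing above: assuming the usign/signify layout, a secret key
# without a passphrase decodes to pkalg(2) + kdfalg(2) + kdfrounds(4) +
# salt(16) + checksum(8) + keynum(8) + 64 bytes of Ed25519 key material
# whose second half is the public key, so bytes 0:2 + 32:40 + 72: reassemble
# exactly the public key blob that ends up in key-build.pub.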
585
586 c['builders'] = []
587
588 workerNames = [ ]
589
590 for worker in c['workers']:
591 workerNames.append(worker.workername)
592
593 # add a single LocalWorker to handle the forcebuild builder
594 c['workers'].append(LocalWorker("__local_force_build", max_builds=1))
595
596 force_factory = BuildFactory()
597 force_factory.addStep(steps.Trigger(
598 name = "trigger_build",
599 schedulerNames = [ "trigger" ],
600 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
601 set_properties = { "reason": Property("reason"), "tag": Property("tag"), "target": Property("target") },
602 ))
603
604 c['builders'].append(BuilderConfig(
605 name = "00_force_build",
606 workername = "__local_force_build",
607 factory = force_factory))
608
609 for target in targets:
610 ts = target.split('/')
611
612 factory = BuildFactory()
613
614 # set up shared work directory if required
615 factory.addStep(ShellCommand(
616 name = "sharedwd",
617 descriptionDone = "Shared work directory set up",
618 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
619 workdir = ".",
620 haltOnFailure = True,
621 ))
622
623 # find number of cores
624 factory.addStep(SetPropertyFromCommand(
625 name = "nproc",
626 property = "nproc",
627 description = "Finding number of CPUs",
628 command = ["nproc"],
629 ))
630
631 # find gcc and g++ compilers
632 factory.addStep(FileDownload(
633 name = "dlfindbinpl",
634 mastersrc = scripts_dir + '/findbin.pl',
635 workerdest = "../findbin.pl",
636 mode = 0o755,
637 ))
638
639 factory.addStep(SetPropertyFromCommand(
640 name = "gcc",
641 property = "cc_command",
642 description = "Finding gcc command",
643 command = ["../findbin.pl", "gcc", "", ""],
644 haltOnFailure = True,
645 ))
646
647 factory.addStep(SetPropertyFromCommand(
648 name = "g++",
649 property = "cxx_command",
650 description = "Finding g++ command",
651 command = ["../findbin.pl", "g++", "", ""],
652 haltOnFailure = True,
653 ))
654
655 # see if ccache is available
656 factory.addStep(SetPropertyFromCommand(
657 name = "ccache",
658 property = "ccache_command",
659 description = "Testing for ccache command",
660 command = ["which", "ccache"],
661 haltOnFailure = False,
662 flunkOnFailure = False,
663 warnOnFailure = False,
664 hideStepIf = lambda r, s: r==results.FAILURE,
665 ))
666
667 # check out the source
668 # Git() runs:
669 # if repo doesn't exist: 'git clone repourl'
670 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
671 # git cat-file -e <commit>
672 # git checkout -f <commit>
673 # git checkout -B <branch>
674 # git rev-parse HEAD
675 factory.addStep(Git(
676 name = "git",
677 repourl = repo_url,
678 mode = 'full',
679 method = 'fresh',
680 locks = NetLockDl,
681 haltOnFailure = True,
682 ))
683
684 # update remote refs
685 factory.addStep(ShellCommand(
686 name = "fetchrefs",
687 description = "Fetching Git remote refs",
688 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
689 haltOnFailure = True,
690 ))
691
692 # Verify that Git HEAD points to a tag or branch
693 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
694 factory.addStep(ShellCommand(
695 name = "gitverify",
696 description = "Ensure that Git HEAD is pointing to a branch or tag",
697 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
698 haltOnFailure = True,
699 ))
700
701 factory.addStep(ShellCommand(
702 name = "rmtmp",
703 description = "Remove tmp folder",
704 command=["rm", "-rf", "tmp/"],
705 ))
706
707 # feed
708 factory.addStep(ShellCommand(
709 name = "rmfeedlinks",
710 description = "Remove feed symlinks",
711 command=["rm", "-rf", "package/feeds/"],
712 ))
713
714 factory.addStep(StringDownload(
715 name = "ccachecc",
716 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
717 workerdest = "../ccache_cc.sh",
718 mode = 0o755,
719 ))
720
721 factory.addStep(StringDownload(
722 name = "ccachecxx",
723 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
724 workerdest = "../ccache_cxx.sh",
725 mode = 0o755,
726 ))
727
728 # feed
729 factory.addStep(ShellCommand(
730 name = "updatefeeds",
731 description = "Updating feeds",
732 command=["./scripts/feeds", "update"],
733 env = MakeEnv(tryccache=True),
734 haltOnFailure = True,
735 locks = NetLockDl,
736 ))
737
738 # feed
739 factory.addStep(ShellCommand(
740 name = "installfeeds",
741 description = "Installing feeds",
742 command=["./scripts/feeds", "install", "-a"],
743 env = MakeEnv(tryccache=True),
744 haltOnFailure = True,
745 ))
746
747 # seed config
748 factory.addStep(StringDownload(
749 name = "dlconfigseed",
750 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
751 workerdest = ".config",
752 mode = 0o644,
753 ))
754
755 # configure
756 factory.addStep(ShellCommand(
757 name = "newconfig",
758 descriptionDone = ".config seeded",
759 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
760 ))
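# For a hypothetical target "ath79/generic" on a branch with an usign key
# configured, the step above appends:
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y
# (CONFIG_SIGNED_PACKAGES=n when the branch has no usign_key)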
761
762 factory.addStep(ShellCommand(
763 name = "delbin",
764 description = "Removing output directory",
765 command = ["rm", "-rf", "bin/"],
766 ))
767
768 factory.addStep(ShellCommand(
769 name = "defconfig",
770 description = "Populating .config",
771 command = ["make", "defconfig"],
772 env = MakeEnv(),
773 ))
774
775 # check arch - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_<target>_<subtarget>
776 factory.addStep(ShellCommand(
777 name = "checkarch",
778 description = "Checking architecture",
779 descriptionDone = "Architecture validated",
780 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
781 logEnviron = False,
782 want_stdout = False,
783 want_stderr = False,
784 haltOnFailure = True,
785 flunkOnFailure = False, # this is not a build FAILURE
786 ))
787
788 # find libc suffix
789 factory.addStep(SetPropertyFromCommand(
790 name = "libc",
791 property = "libc",
792 description = "Finding libc suffix",
793 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
794 ))
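# The sed above turns CONFIG_LIBC="musl" into an empty suffix and any other
# libc value into "-<libc>", so a hypothetical glibc build is picked up from
# bin/targets/<target>/<subtarget>-glibc/ by the later steps instead of
# bin/targets/<target>/<subtarget>/.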
795
796 # install build key
797 factory.addStep(StringDownload(
798 name = "dlkeybuildpub",
799 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
800 workerdest = "key-build.pub",
801 mode = 0o600,
802 doStepIf = IsUsignEnabled,
803 ))
804
805 factory.addStep(StringDownload(
806 name = "dlkeybuild",
807 s = "# fake private key",
808 workerdest = "key-build",
809 mode = 0o600,
810 doStepIf = IsUsignEnabled,
811 ))
812
813 factory.addStep(StringDownload(
814 name = "dlkeybuilducert",
815 s = "# fake certificate",
816 workerdest = "key-build.ucert",
817 mode = 0o600,
818 doStepIf = IsUsignEnabled,
819 ))
820
821 # prepare dl
822 factory.addStep(ShellCommand(
823 name = "dldir",
824 description = "Preparing dl/",
825 descriptionDone = "dl/ prepared",
826 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
827 workdir = Property("builddir"),
828 logEnviron = False,
829 want_stdout = False,
830 ))
831
832 # cleanup dl
833 factory.addStep(ShellCommand(
834 name = "dlprune",
835 description = "Pruning dl/",
836 descriptionDone = "dl/ pruned",
837 command = 'find dl/ -atime +15 -delete -print',
838 logEnviron = False,
839 ))
840
841 # prepare tar
842 factory.addStep(ShellCommand(
843 name = "dltar",
844 description = "Building and installing GNU tar",
845 descriptionDone = "GNU tar built and installed",
846 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
847 env = MakeEnv(tryccache=True),
848 haltOnFailure = True,
849 ))
850
851 # populate dl
852 factory.addStep(ShellCommand(
853 name = "dlrun",
854 description = "Populating dl/",
855 descriptionDone = "dl/ populated",
856 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
857 env = MakeEnv(),
858 logEnviron = False,
859 locks = NetLockDl,
860 ))
861
862 factory.addStep(ShellCommand(
863 name = "cleanbase",
864 description = "Cleaning base-files",
865 command=["make", "package/base-files/clean", "V=s"],
866 ))
867
868 # build
869 factory.addStep(ShellCommand(
870 name = "tools",
871 description = "Building and installing tools",
872 descriptionDone = "Tools built and installed",
873 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
874 env = MakeEnv(tryccache=True),
875 haltOnFailure = True,
876 ))
877
878 factory.addStep(ShellCommand(
879 name = "toolchain",
880 description = "Building and installing toolchain",
881 descriptionDone = "Toolchain built and installed",
882 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
883 env = MakeEnv(),
884 haltOnFailure = True,
885 ))
886
887 factory.addStep(ShellCommand(
888 name = "kmods",
889 description = "Building kmods",
890 descriptionDone = "Kmods built",
891 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
892 env = MakeEnv(),
893 haltOnFailure = True,
894 ))
895
896 # find kernel version
897 factory.addStep(SetPropertyFromCommand(
898 name = "kernelversion",
899 property = "kernelversion",
900 description = "Finding the effective Kernel version",
901 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
902 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
903 ))
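# The printf above joins the three values into a single
# "<version>-<release>-<vermagic>" string, e.g. a purely hypothetical
# "5.15.137-1-0a1b2c3d4e5f...", which names the per-kernel kmods/ directory
# used by the kmod archive steps below.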
904
905 factory.addStep(ShellCommand(
906 name = "pkgclean",
907 description = "Cleaning up package build",
908 descriptionDone = "Package build cleaned up",
909 command=["make", "package/cleanup", "V=s"],
910 ))
911
912 factory.addStep(ShellCommand(
913 name = "pkgbuild",
914 description = "Building packages",
915 descriptionDone = "Packages built",
916 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
917 env = MakeEnv(),
918 haltOnFailure = True,
919 ))
920
921 factory.addStep(ShellCommand(
922 name = "pkginstall",
923 description = "Installing packages",
924 descriptionDone = "Packages installed",
925 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
926 env = MakeEnv(),
927 haltOnFailure = True,
928 ))
929
930 factory.addStep(ShellCommand(
931 name = "pkgindex",
932 description = "Indexing packages",
933 descriptionDone = "Packages indexed",
934 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
935 env = MakeEnv(),
936 haltOnFailure = True,
937 ))
938
939 factory.addStep(ShellCommand(
940 name = "images",
941 description = "Building and installing images",
942 descriptionDone = "Images built and installed",
943 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
944 env = MakeEnv(),
945 haltOnFailure = True,
946 ))
947
948 factory.addStep(ShellCommand(
949 name = "buildinfo",
950 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
951 command = "make -j1 buildinfo V=s || true",
952 env = MakeEnv(),
953 haltOnFailure = True,
954 ))
955
956 factory.addStep(ShellCommand(
957 name = "json_overview_image_info",
958 description = "Generating profiles.json in target folder",
959 command = "make -j1 json_overview_image_info V=s || true",
960 env = MakeEnv(),
961 haltOnFailure = True,
962 ))
963
964 factory.addStep(ShellCommand(
965 name = "checksums",
966 description = "Calculating checksums",
967 descriptionDone = "Checksums calculated",
968 command=["make", "-j1", "checksum", "V=s"],
969 env = MakeEnv(),
970 haltOnFailure = True,
971 ))
972
973 factory.addStep(ShellCommand(
974 name = "kmoddir",
975 descriptionDone = "Kmod directory created",
976 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
977 haltOnFailure = True,
978 doStepIf = IsKmodArchiveEnabled,
979 ))
980
981 factory.addStep(ShellCommand(
982 name = "kmodprepare",
983 description = "Preparing kmod archive",
984 descriptionDone = "Kmod archive prepared",
985 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
986 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
987 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
988 haltOnFailure = True,
989 doStepIf = IsKmodArchiveEnabled,
990 ))
991
992 factory.addStep(ShellCommand(
993 name = "kmodindex",
994 description = "Indexing kmod archive",
995 descriptionDone = "Kmod archive indexed",
996 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
997 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
998 env = MakeEnv(),
999 haltOnFailure = True,
1000 doStepIf = IsKmodArchiveEnabled,
1001 ))
1002
1003 # sign
1004 factory.addStep(MasterShellCommand(
1005 name = "signprepare",
1006 descriptionDone = "Temporary signing directory prepared",
1007 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1008 haltOnFailure = True,
1009 doStepIf = IsSignEnabled,
1010
1011 ))
1012
1013 factory.addStep(ShellCommand(
1014 name = "signpack",
1015 description = "Packing files to sign",
1016 descriptionDone = "Files to sign packed",
1017 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1018 haltOnFailure = True,
1019 doStepIf = IsSignEnabled,
1020 ))
1021
1022 factory.addStep(FileUpload(
1023 workersrc = "sign.tar.gz",
1024 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1025 haltOnFailure = True,
1026 doStepIf = IsSignEnabled,
1027 ))
1028
1029 factory.addStep(MasterShellCommand(
1030 name = "signfiles",
1031 description = "Signing files",
1032 descriptionDone = "Files signed",
1033 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
1034 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1035 haltOnFailure = True,
1036 doStepIf = IsSignEnabled,
1037 ))
1038
1039 factory.addStep(FileDownload(
1040 name = "dlsigntargz",
1041 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1042 workerdest = "sign.tar.gz",
1043 haltOnFailure = True,
1044 doStepIf = IsSignEnabled,
1045 ))
1046
1047 factory.addStep(ShellCommand(
1048 name = "signunpack",
1049 description = "Unpacking signed files",
1050 descriptionDone = "Signed files unpacked",
1051 command = ["tar", "-xzf", "sign.tar.gz"],
1052 haltOnFailure = True,
1053 doStepIf = IsSignEnabled,
1054 ))
1055
1056 # upload
1057 factory.addStep(ShellCommand(
1058 name = "dirprepare",
1059 descriptionDone = "Upload directory structure prepared",
1060 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1061 haltOnFailure = True,
1062 ))
1063
1064 factory.addStep(ShellCommand(
1065 name = "linkprepare",
1066 descriptionDone = "Repository symlink prepared",
1067 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1068 doStepIf = IsNoMasterBuild,
1069 haltOnFailure = True,
1070 ))
1071
1072 factory.addStep(ShellCommand(
1073 name = "kmoddirprepare",
1074 descriptionDone = "Kmod archive upload directory prepared",
1075 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1076 haltOnFailure = True,
1077 doStepIf = IsKmodArchiveEnabled,
1078 ))
1079
1080 factory.addStep(ShellCommand(
1081 name = "dirupload",
1082 description = "Uploading directory structure",
1083 descriptionDone = "Directory structure uploaded",
1084 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1085 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1086 haltOnFailure = True,
1087 logEnviron = False,
1088 locks = NetLockUl,
1089 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1090 ))
1091
1092 # download remote sha256sums to 'target-sha256sums'
1093 factory.addStep(ShellCommand(
1094 name = "target-sha256sums",
1095 description = "Fetching remote sha256sums for target",
1096 descriptionDone = "Remote sha256sums for target fetched",
1097 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1098 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1099 logEnviron = False,
1100 haltOnFailure = False,
1101 flunkOnFailure = False,
1102 warnOnFailure = False,
1103 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1104 ))
1105
1106 # build list of files to upload
1107 factory.addStep(FileDownload(
1108 name = "dlsha2rsyncpl",
1109 mastersrc = scripts_dir + '/sha2rsync.pl',
1110 workerdest = "../sha2rsync.pl",
1111 mode = 0o755,
1112 ))
1113
1114 factory.addStep(ShellCommand(
1115 name = "buildlist",
1116 description = "Building list of files to upload",
1117 descriptionDone = "List of files to upload built",
1118 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1119 haltOnFailure = True,
1120 ))
1121
1122 factory.addStep(FileDownload(
1123 name = "dlrsync.sh",
1124 mastersrc = scripts_dir + '/rsync.sh',
1125 workerdest = "../rsync.sh",
1126 mode = 0o755,
1127 ))
1128
1129 # upload new files and update existing ones
1130 factory.addStep(ShellCommand(
1131 name = "targetupload",
1132 description = "Uploading target files",
1133 descriptionDone = "Target files uploaded",
1134 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1135 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1136 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1137 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1138 haltOnFailure = True,
1139 logEnviron = False,
1140 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1141 ))
1142
1143 # delete files which don't exist locally
1144 factory.addStep(ShellCommand(
1145 name = "targetprune",
1146 description = "Pruning target files",
1147 descriptionDone = "Target files pruned",
1148 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1149 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1150 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1151 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1152 haltOnFailure = True,
1153 logEnviron = False,
1154 locks = NetLockUl,
1155 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1156 ))
1157
1158 factory.addStep(ShellCommand(
1159 name = "kmodupload",
1160 description = "Uploading kmod archive",
1161 descriptionDone = "Kmod archive uploaded",
1162 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1163 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1164 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1165 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1166 haltOnFailure = True,
1167 logEnviron = False,
1168 locks = NetLockUl,
1169 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
1170 ))
1171
1172 factory.addStep(ShellCommand(
1173 name = "sourcelist",
1174 description = "Finding source archives to upload",
1175 descriptionDone = "Source archives to upload found",
1176 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1177 haltOnFailure = True,
1178 ))
1179
1180 factory.addStep(ShellCommand(
1181 name = "sourceupload",
1182 description = "Uploading source archives",
1183 descriptionDone = "Source archives uploaded",
1184 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1185 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1186 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1187 haltOnFailure = True,
1188 logEnviron = False,
1189 locks = NetLockUl,
1190 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1191 ))
1192
1193 factory.addStep(ShellCommand(
1194 name = "df",
1195 description = "Reporting disk usage",
1196 command=["df", "-h", "."],
1197 env={'LC_ALL': 'C'},
1198 logEnviron = False,
1199 haltOnFailure = False,
1200 flunkOnFailure = False,
1201 warnOnFailure = False,
1202 alwaysRun = True,
1203 ))
1204
1205 factory.addStep(ShellCommand(
1206 name = "du",
1207 description = "Reporting estimated file space usage",
1208 command=["du", "-sh", "."],
1209 env={'LC_ALL': 'C'},
1210 logEnviron = False,
1211 haltOnFailure = False,
1212 flunkOnFailure = False,
1213 warnOnFailure = False,
1214 alwaysRun = True,
1215 ))
1216
1217 factory.addStep(ShellCommand(
1218 name = "ccachestat",
1219 description = "Reporting ccache stats",
1220 command=["ccache", "-s"],
1221 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1222 logEnviron = False,
1223 want_stderr = False,
1224 haltOnFailure = False,
1225 flunkOnFailure = False,
1226 warnOnFailure = False,
1227 hideStepIf = lambda r, s: r==results.FAILURE,
1228 ))
1229
1230 for brname in branchNames:
1231 bldrname = brname + "_" + target
1232 c['builders'].append(BuilderConfig(name=bldrname, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
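# With the per-branch, per-target split above, each (branch, target) pair
# gets its own builder named "<branch>_<target>/<subtarget>", e.g. a
# hypothetical "main_ath79/generic"; builders for the same target share one
# factory, all of them share the full worker pool, and the single local
# 00_force_build builder defined earlier handles forced builds.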
1233
1234
1235 ####### STATUS TARGETS
1236
1237 # Build results are exposed through the web UI ('www') configured below and
1238 # through optional reporters such as the IRC bot; both are driven by
1239 # settings from config.ini.
1240
1241 if "status_bind" in inip1:
1242 c['www'] = {
1243 'port': inip1.get("status_bind"),
1244 'plugins': {
1245 'waterfall_view': True,
1246 'console_view': True,
1247 'grid_view': True
1248 }
1249 }
1250
1251 if "status_user" in inip1 and "status_password" in inip1:
1252 c['www']['auth'] = util.UserPasswordAuth([
1253 (inip1.get("status_user"), inip1.get("status_password"))
1254 ])
1255 c['www']['authz'] = util.Authz(
1256 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1257 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1258 )
1259
1260 c['services'] = []
1261 if ini.has_section("irc"):
1262 iniirc = ini['irc']
1263 irc_host = iniirc.get("host", None)
1264 irc_port = iniirc.getint("port", 6667)
1265 irc_chan = iniirc.get("channel", None)
1266 irc_nick = iniirc.get("nickname", None)
1267 irc_pass = iniirc.get("password", None)
1268
1269 if irc_host and irc_nick and irc_chan:
1270 irc = reporters.IRC(irc_host, irc_nick,
1271 port = irc_port,
1272 password = irc_pass,
1273 channels = [ irc_chan ],
1274 notify_events = [ 'exception', 'problem', 'recovery' ]
1275 )
1276
1277 c['services'].append(irc)
1278
1279 c['revlink'] = util.RevlinkMatch([
1280 r'https://git.openwrt.org/openwrt/(.*).git'
1281 ],
1282 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1283
1284 ####### DB URL
1285
1286 c['db'] = {
1287 # This specifies what database buildbot uses to store its state. You can leave
1288 # this at its default for all but the largest installations.
1289 'db_url' : "sqlite:///state.sqlite",
1290 }
1291
1292 c['buildbotNetUsageData'] = None