# phase1: buildfactory: remove deadcode
# [buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Record our process id so helper scripts can find the running master.
# Only write it when absent, mirroring twistd's own pidfile handling.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# All three sections are mandatory for a phase-1 master.
for required_section in ("general", "phase1", "rsync"):
    if required_section not in ini:
        raise ValueError("Fix your configuration")

inip1 = ini['phase1']

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string appears at the top of this buildbot installation's
# home page (linked to the 'titleURL') and in the waterfall page title.
c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server is visible, using an externally-visible host name
# which the buildbot cannot figure out without some help.
c['buildbotURL'] = inip1.get("buildbot_url")
79
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

c['workers'] = []
NetLocks = dict()

for section in ini.sections():
    if not section.startswith("worker "):
        continue
    if not (ini.has_option(section, "name") and ini.has_option(section, "password")):
        continue
    # Only phase-1 workers (or workers without an explicit phase) belong here.
    if ini.has_option(section, "phase") and ini.getint(section, "phase") != 1:
        continue

    name = ini.get(section, "name")
    password = ini.get(section, "password")
    max_builds = ini.getint(section, "builds") if ini.has_option(section, "builds") else 1

    sl_props = {
        'dl_lock': None,
        'ul_lock': None,
        'do_cleanup': False,
        'max_builds': max_builds,
        # A single-build worker defaults to a shared work directory.
        'shared_wd': max_builds == 1,
    }

    if ini.has_option(section, "cleanup"):
        sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")

    if ini.has_option(section, "dl_lock"):
        lockname = ini.get(section, "dl_lock")
        sl_props['dl_lock'] = lockname
        if lockname not in NetLocks:
            NetLocks[lockname] = locks.MasterLock(lockname)

    if ini.has_option(section, "ul_lock"):
        # BUG FIX: this branch previously read the "dl_lock" option
        # (copy/paste error), so a configured upload lock silently reused
        # the download lock's name.
        lockname = ini.get(section, "ul_lock")
        sl_props['ul_lock'] = lockname
        if lockname not in NetLocks:
            NetLocks[lockname] = locks.MasterLock(lockname)

    if ini.has_option(section, "shared_wd"):
        shared_wd = ini.getboolean(section, "shared_wd")
        sl_props['shared_wd'] = shared_wd
        if shared_wd and max_builds != 1:
            raise ValueError('max_builds must be 1 with shared workdir!')

    c['workers'].append(Worker(name, password, max_builds=max_builds, properties=sl_props))
120
# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data the janitor keeps around.
c['configurators'] = [
    util.JanitorConfigurator(
        logHorizon=timedelta(days=3),
        hour=6,
    ),
]
133
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Return the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests.

    SKIPPED requests must be filtered out because collapseRequests=True
    unfortunately marks all previous requests complete when a new
    buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()

    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'],
        limit=1)
    if not completed:
        return

    newest = completed[0]['complete_at']

    # A build may have finished after its request was marked complete;
    # prefer the most recent build's completion time when it is later.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'],
        limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and last_complete_at > newest:
            return last_complete_at

    return newest
171
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Return builders sorted by the timestamp of their last completed,
    non-skipped build: least recently finished first, never-built first of
    all, currently-building builders last.

    @returns: list of sorted builders, via Deferred
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Never-built builders get the oldest possible timestamp so they
        # sort to the front ...
        if not complete_at:
            complete_at = datetime.min.replace(tzinfo=tzutc())

        # ... while busy builders get the newest so they sort to the back.
        if is_building(bldr):
            complete_at = datetime.max.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # FIX: renamed the local from "results" — it shadowed the
    # buildbot.process.results module imported at file scope.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for _, bldr in infos]

c['prioritizeBuilders'] = prioritizeBuilders
210
####### CHANGESOURCES

work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

tree_expire = inip1.getint("expire", 0)
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# rsync-protocol destinations ("module::path" or rsync:// URL) also accept
# a connect timeout.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.startswith("rsync://"):
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.startswith("rsync://"):
    rsync_src_defopts += ["--contimeout=20"]

# usign signing key (optional); the default comment is derived from the
# branch name, e.g. "untrusted comment: Openwrt 21.02 key".
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)
245
# find targets
targets = []

source_git = work_dir + '/source.git'

if not os.path.isdir(source_git):
    subprocess.call(["git", "clone", "--depth=1", "--branch=" + repo_branch, repo_url, source_git])
else:
    subprocess.call(["git", "pull"], cwd=source_git)

os.makedirs(source_git + '/tmp', exist_ok=True)

findtargets = subprocess.Popen(
    ['./scripts/dump-target-info.pl', 'targets'],
    stdout=subprocess.PIPE, cwd=source_git)

# each line is "<target/subtarget> ..." — keep the first column only
for line in findtargets.stdout:
    targets.append(line.decode().strip().split(' ')[0])


# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes.

c['change_source'] = []
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir + '/work.git',
    branch=repo_branch,
    pollinterval=300))
274
275 ####### SCHEDULERS
276
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
279
class TagChoiceParameter(BaseParameter):
    """Force-scheduler list parameter offering the release tags that match
    the configured stable branch (e.g. v21.02.* for an openwrt-21.02
    branch), newest first, with an empty entry meaning "no tag"."""

    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @staticmethod
    def _tag_order(tag):
        """Sort key for version tags like "21.02.3" or "21.02.3-rc2".

        Compares the version numerically and ranks a final release above
        its release candidates (used with reverse=True).
        """
        main, _, rc = tag.partition('-rc')
        return ([int(part) for part in main.split('.')],
                1 if not rc else 0,
                int(rc or 0))

    @property
    def choices(self):
        taglist = []
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            findtags = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout=subprocess.PIPE)

            while True:
                line = findtags.stdout.readline()

                if not line:
                    break

                tagver = re.search(
                    r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$',
                    line.decode().strip())

                if tagver and tagver[1].find(basever[1]) == 0:
                    taglist.append(tagver[1])

            # BUG FIX: the previous plain string sort compared versions
            # lexicographically, so e.g. 21.02.9 ranked above 21.02.10.
            taglist.sort(key=self._tag_order, reverse=True)
            taglist.insert(0, '')

            self._choice_list = taglist

        return self._choice_list

    def parse_from_arg(self, s):
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
321
# Trigger all target builders on pushes to the tracked branch, and expose a
# manual "force" scheduler with target/tag options.
c['schedulers'] = []

c['schedulers'].append(SingleBranchScheduler(
    name="all",
    change_filter=filter.ChangeFilter(branch=repo_branch),
    treeStableTimer=60,
    builderNames=targets))

c['schedulers'].append(ForceScheduler(
    name="force",
    buttonName="Force builds",
    label="Force build details",
    builderNames=["00_force_build"],

    # Repository/branch/revision are fixed: forced builds always use the
    # configured repo at the tracked branch.
    codebases=[
        util.CodebaseParameter(
            "",
            label="Repository",
            branch=util.FixedParameter(name="branch", default=""),
            revision=util.FixedParameter(name="revision", default=""),
            repository=util.FixedParameter(name="repository", default=""),
            project=util.FixedParameter(name="project", default=""))
    ],

    reason=util.StringParameter(
        name="reason",
        label="Reason",
        default="Trigger build",
        required=True,
        size=80),

    properties=[
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name="target",
                    label="Build target",
                    default="all",
                    choices=["all"] + targets),
                TagChoiceParameter(
                    name="tag",
                    label="Build tag",
                    default="")
            ])
    ]))
375
376 ####### BUILDERS
377
378 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
379 # what steps, and which workers can execute them. Note that any particular build will
380 # only take place on one worker.
381
# Mapping of user-facing clean option names to the make targets they invoke.
CleanTargetMap = [
    ["tools", "tools/clean"],
    ["chain", "toolchain/clean"],
    ["linux", "target/linux/clean"],
    ["dir", "dirclean"],
    ["dist", "distclean"]
]

def IsMakeCleanRequested(pattern):
    """Build a doStepIf predicate: true when the build's "clean" property
    matches *pattern* (one of the CleanTargetMap option names)."""
    def _check(step):
        requested = step.getProperty("clean")
        return bool(requested and re.match(pattern, requested))

    return _check
399
def IsSharedWorkdir(step):
    """True when this worker builds in a shared work directory."""
    shared = step.getProperty("shared_wd")
    return bool(shared)

def IsCleanupRequested(step):
    """True when the worker asked for cleanup and does not use a shared
    workdir (cleanup would destroy the shared tree)."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))

def IsExpireRequested(step):
    """True when tree expiry should run: non-shared workdir and no explicit
    cleanup requested."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
417
def IsGitFreshRequested(step):
    """True when the git checkout should use the "fresh" method (cleanup
    builds also wipe untracked/ignored files)."""
    return bool(step.getProperty("do_cleanup"))

def IsGitCleanRequested(step):
    """Inverse of IsGitFreshRequested: use the lighter "clean" method."""
    return not IsGitFreshRequested(step)
427
def IsTaggingRequested(step):
    """True when the "tag" property holds a full release version
    (x.y.z with an optional -rcN suffix)."""
    tag = step.getProperty("tag")
    return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))

def IsNoTaggingRequested(step):
    """Inverse of IsTaggingRequested."""
    return not IsTaggingRequested(step)
437
def IsNoMasterBuild(step):
    """True when the configured branch is a stable branch, not master."""
    return repo_branch != "master"
440
def GetBaseVersion(branch=None):
    """Return the numeric base version encoded in a release branch name,
    e.g. "21.02" for "openwrt-21.02", or "master" for any other branch.

    branch: branch name to inspect; defaults to the globally configured
        repo_branch, so existing zero-argument call sites are unchanged.
    """
    if branch is None:
        branch = repo_branch
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
    return "master"
446
@properties.renderer
def GetVersionPrefix(props):
    """Upload directory prefix: "<tag>/" for tagged release builds,
    "<basever>-SNAPSHOT/" on stable branches, "" on master."""
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
456
@properties.renderer
def GetNumJobs(props):
    """Parallel make jobs for this build: the worker's CPU count divided
    evenly among its concurrent builds; "1" when either value is unknown."""
    if not (props.hasProperty("max_builds") and props.hasProperty("nproc")):
        return "1"
    return str(int(int(props["nproc"]) / props["max_builds"]))
463
@properties.renderer
def GetCC(props):
    """Host C compiler detected by the findbin step, falling back to gcc."""
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"

@properties.renderer
def GetCXX(props):
    """Host C++ compiler detected by the findbin step, falling back to g++."""
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"
477
@properties.renderer
def GetCwd(props):
    """Best available working directory for the build ("/" as last resort)."""
    if props.hasProperty("builddir"):
        return props["builddir"]
    if props.hasProperty("workdir"):
        return props["workdir"]
    return "/"

@properties.renderer
def GetCCache(props):
    """The detected ccache command, or "" when ccache is unavailable."""
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
493
def GetNextBuild(builder, requests):
    """Pick the next build request for a builder: prefer any request that
    carries a "tag" property (forced release builds), otherwise take the
    first pending request."""
    for req in requests:
        if req.properties and req.properties.hasProperty("tag"):
            return req

    req = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, req.id, req.bsid))
    return req
502
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict for make invocations.

    CCC/CCXX always hold the real compilers; with tryccache=True, CC/CXX
    point at the ccache wrapper scripts instead. *overrides* (a dict) is
    merged in last.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env.update({
            'CC': Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd),
            'CXX': Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd),
            'CCACHE': Interpolate("%(kw:ccache)s", ccache=GetCCache),
        })
    else:
        env.update({'CC': env['CCC'], 'CXX': env['CCXX'], 'CCACHE': ''})
    if overrides is not None:
        env.update(overrides)
    return env
519
@properties.renderer
def NetLockDl(props):
    """Exclusive access to this worker's download network lock, if one is
    configured; otherwise no locks."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []

@properties.renderer
def NetLockUl(props):
    """Exclusive access to this worker's upload network lock, if one is
    configured; otherwise no locks."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
539
@util.renderer
def TagPropertyValue(props):
    """Extract the "tag" value from the force-scheduler's nested "options"
    property, or None when absent."""
    if not props.hasProperty("options"):
        return None
    options = props.getProperty("options")
    if type(options) is dict:
        return options.get("tag")
    return None
547
def IsTargetSelected(target):
    """Build a doStepIf predicate: true unless the force-scheduler options
    select a specific target different from *target*."""
    def _check(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                chosen = options.get("target", "all")
                return chosen in ("all", target)
        except KeyError:
            pass

        return True

    return _check
562
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key text from a base64-encoded secret key.

    Returns the two-line public key (comment line with "secret key"
    rewritten to "public key", then the base64 key data), or None when the
    secret key cannot be decoded.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # invalid or garbled key material -> no public key
        return None

    # NOTE(review): byte offsets (0-1 algorithm, 32-39 key id, 72+ public
    # part) are taken from the original code; confirm against the usign
    # on-disk key format.
    pubdata = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:])

    # BUG FIX: b64encode() returns bytes; formatting it directly embedded
    # a "b'...'" repr in the generated key file. Decode to ASCII text.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
                           pubdata.decode('ascii'))
571
572
c['builders'] = []

# per-worker lock serializing dl/ population across that worker's builds
dlLock = locks.WorkerLock("worker_dl")

workerNames = [worker.workername for worker in c['workers']]

# The force scheduler needs a builder of its own; it performs no steps.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name="00_force_build",
    workernames=workerNames,
    factory=force_factory))
588
589 for target in targets:
590 ts = target.split('/')
591
592 factory = BuildFactory()
593
594 # setup shared work directory if required
595 factory.addStep(ShellCommand(
596 name = "sharedwd",
597 description = "Setting up shared work directory",
598 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
599 workdir = ".",
600 haltOnFailure = True,
601 doStepIf = IsSharedWorkdir))
602
603 # find number of cores
604 factory.addStep(SetPropertyFromCommand(
605 name = "nproc",
606 property = "nproc",
607 description = "Finding number of CPUs",
608 command = ["nproc"]))
609
610 # find gcc and g++ compilers
611 factory.addStep(FileDownload(
612 name = "dlfindbinpl",
613 mastersrc = scripts_dir + '/findbin.pl',
614 workerdest = "../findbin.pl",
615 mode = 0o755))
616
617 factory.addStep(SetPropertyFromCommand(
618 name = "gcc",
619 property = "cc_command",
620 description = "Finding gcc command",
621 command = [
622 "../findbin.pl", "gcc", "", "",
623 ],
624 haltOnFailure = True))
625
626 factory.addStep(SetPropertyFromCommand(
627 name = "g++",
628 property = "cxx_command",
629 description = "Finding g++ command",
630 command = [
631 "../findbin.pl", "g++", "", "",
632 ],
633 haltOnFailure = True))
634
635 # see if ccache is available
636 factory.addStep(SetPropertyFromCommand(
637 property = "ccache_command",
638 command = ["which", "ccache"],
639 description = "Testing for ccache command",
640 haltOnFailure = False,
641 flunkOnFailure = False,
642 warnOnFailure = False,
643 ))
644
645 # expire tree if needed
646 if tree_expire > 0:
647 factory.addStep(FileDownload(
648 name = "dlexpiresh",
649 doStepIf = IsExpireRequested,
650 mastersrc = scripts_dir + '/expire.sh',
651 workerdest = "../expire.sh",
652 mode = 0o755))
653
654 factory.addStep(ShellCommand(
655 name = "expire",
656 description = "Checking for build tree expiry",
657 command = ["./expire.sh", str(tree_expire)],
658 workdir = ".",
659 haltOnFailure = True,
660 doStepIf = IsExpireRequested,
661 timeout = 2400))
662
663 # cleanup.sh if needed
664 factory.addStep(FileDownload(
665 name = "dlcleanupsh",
666 mastersrc = scripts_dir + '/cleanup.sh',
667 workerdest = "../cleanup.sh",
668 mode = 0o755,
669 doStepIf = IsCleanupRequested))
670
671 factory.addStep(ShellCommand(
672 name = "cleanold",
673 description = "Cleaning previous builds",
674 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
675 workdir = ".",
676 haltOnFailure = True,
677 doStepIf = IsCleanupRequested,
678 timeout = 2400))
679
680 factory.addStep(ShellCommand(
681 name = "cleanup",
682 description = "Cleaning work area",
683 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
684 workdir = ".",
685 haltOnFailure = True,
686 doStepIf = IsCleanupRequested,
687 timeout = 2400))
688
689 # user-requested clean targets
690 for tuple in CleanTargetMap:
691 factory.addStep(ShellCommand(
692 name = tuple[1],
693 description = 'User-requested "make %s"' % tuple[1],
694 command = ["make", tuple[1], "V=s"],
695 env = MakeEnv(),
696 doStepIf = IsMakeCleanRequested(tuple[0])
697 ))
698
699 # Workaround bug when switching from a checked out tag back to a branch
700 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
701 factory.addStep(ShellCommand(
702 name = "gitcheckout",
703 description = "Ensure that Git HEAD is sane",
704 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
705 haltOnFailure = True))
706
707 # check out the source
708 # Git() runs:
709 # if repo doesn't exist: 'git clone repourl'
710 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
711 # 'git fetch -t repourl branch; git reset --hard revision'
712 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
713 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
714 factory.addStep(Git(
715 name = "gitclean",
716 repourl = repo_url,
717 branch = repo_branch,
718 mode = 'full',
719 method = 'clean',
720 locks = NetLockDl,
721 haltOnFailure = True,
722 doStepIf = IsGitCleanRequested,
723 ))
724
725 factory.addStep(Git(
726 name = "gitfresh",
727 repourl = repo_url,
728 branch = repo_branch,
729 mode = 'full',
730 method = 'fresh',
731 locks = NetLockDl,
732 haltOnFailure = True,
733 doStepIf = IsGitFreshRequested,
734 ))
735
736 # update remote refs
737 factory.addStep(ShellCommand(
738 name = "fetchrefs",
739 description = "Fetching Git remote refs",
740 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
741 haltOnFailure = True
742 ))
743
744 # switch to tag
745 factory.addStep(ShellCommand(
746 name = "switchtag",
747 description = "Checking out Git tag",
748 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
749 haltOnFailure = True,
750 doStepIf = IsTaggingRequested
751 ))
752
753 # Verify that Git HEAD points to a tag or branch
754 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
755 factory.addStep(ShellCommand(
756 name = "gitverify",
757 description = "Ensure that Git HEAD is pointing to a branch or tag",
758 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
759 haltOnFailure = True))
760
761 factory.addStep(ShellCommand(
762 name = "rmtmp",
763 description = "Remove tmp folder",
764 command=["rm", "-rf", "tmp/"]))
765
766 # feed
767 factory.addStep(ShellCommand(
768 name = "rmfeedlinks",
769 description = "Remove feed symlinks",
770 command=["rm", "-rf", "package/feeds/"]))
771
772 factory.addStep(StringDownload(
773 name = "ccachecc",
774 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
775 workerdest = "../ccache_cc.sh",
776 mode = 0o755,
777 ))
778
779 factory.addStep(StringDownload(
780 name = "ccachecxx",
781 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
782 workerdest = "../ccache_cxx.sh",
783 mode = 0o755,
784 ))
785
786 # feed
787 factory.addStep(ShellCommand(
788 name = "updatefeeds",
789 description = "Updating feeds",
790 command=["./scripts/feeds", "update"],
791 env = MakeEnv(tryccache=True),
792 haltOnFailure = True,
793 locks = NetLockDl,
794 ))
795
796 # feed
797 factory.addStep(ShellCommand(
798 name = "installfeeds",
799 description = "Installing feeds",
800 command=["./scripts/feeds", "install", "-a"],
801 env = MakeEnv(tryccache=True),
802 haltOnFailure = True
803 ))
804
805 # seed config
806 if config_seed is not None:
807 factory.addStep(StringDownload(
808 name = "dlconfigseed",
809 s = config_seed + '\n',
810 workerdest = ".config",
811 mode = 0o644
812 ))
813
814 # configure
815 factory.addStep(ShellCommand(
816 name = "newconfig",
817 description = "Seeding .config",
818 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
819 ))
820
821 factory.addStep(ShellCommand(
822 name = "delbin",
823 description = "Removing output directory",
824 command = ["rm", "-rf", "bin/"]
825 ))
826
827 factory.addStep(ShellCommand(
828 name = "defconfig",
829 description = "Populating .config",
830 command = ["make", "defconfig"],
831 env = MakeEnv()
832 ))
833
834 # check arch
835 factory.addStep(ShellCommand(
836 name = "checkarch",
837 description = "Checking architecture",
838 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
839 logEnviron = False,
840 want_stdout = False,
841 want_stderr = False,
842 haltOnFailure = True
843 ))
844
845 # find libc suffix
846 factory.addStep(SetPropertyFromCommand(
847 name = "libc",
848 property = "libc",
849 description = "Finding libc suffix",
850 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
851
852 # install build key
853 if usign_key is not None:
854 factory.addStep(StringDownload(
855 name = "dlkeybuildpub",
856 s = UsignSec2Pub(usign_key, usign_comment),
857 workerdest = "key-build.pub",
858 mode = 0o600,
859 ))
860
861 factory.addStep(StringDownload(
862 name = "dlkeybuild",
863 s = "# fake private key",
864 workerdest = "key-build",
865 mode = 0o600,
866 ))
867
868 factory.addStep(StringDownload(
869 name = "dlkeybuilducert",
870 s = "# fake certificate",
871 workerdest = "key-build.ucert",
872 mode = 0o600,
873 ))
874
875 # prepare dl
876 factory.addStep(ShellCommand(
877 name = "dldir",
878 description = "Preparing dl/",
879 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
880 logEnviron = False,
881 want_stdout = False
882 ))
883
884 # prepare tar
885 factory.addStep(ShellCommand(
886 name = "dltar",
887 description = "Building and installing GNU tar",
888 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
889 env = MakeEnv(tryccache=True),
890 haltOnFailure = True
891 ))
892
893 # populate dl
894 factory.addStep(ShellCommand(
895 name = "dlrun",
896 description = "Populating dl/",
897 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
898 env = MakeEnv(),
899 logEnviron = False,
900 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
901 ))
902
903 factory.addStep(ShellCommand(
904 name = "cleanbase",
905 description = "Cleaning base-files",
906 command=["make", "package/base-files/clean", "V=s"]
907 ))
908
909 # build
910 factory.addStep(ShellCommand(
911 name = "tools",
912 description = "Building and installing tools",
913 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
914 env = MakeEnv(tryccache=True),
915 haltOnFailure = True
916 ))
917
918 factory.addStep(ShellCommand(
919 name = "toolchain",
920 description = "Building and installing toolchain",
921 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
922 env = MakeEnv(),
923 haltOnFailure = True
924 ))
925
926 factory.addStep(ShellCommand(
927 name = "kmods",
928 description = "Building kmods",
929 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
930 env = MakeEnv(),
931 haltOnFailure = True
932 ))
933
934 # find kernel version
935 factory.addStep(SetPropertyFromCommand(
936 name = "kernelversion",
937 property = "kernelversion",
938 description = "Finding the effective Kernel version",
939 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
940 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
941 ))
942
943 factory.addStep(ShellCommand(
944 name = "pkgclean",
945 description = "Cleaning up package build",
946 command=["make", "package/cleanup", "V=s"]
947 ))
948
949 factory.addStep(ShellCommand(
950 name = "pkgbuild",
951 description = "Building packages",
952 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
953 env = MakeEnv(),
954 haltOnFailure = True
955 ))
956
957 factory.addStep(ShellCommand(
958 name = "pkginstall",
959 description = "Installing packages",
960 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
961 env = MakeEnv(),
962 haltOnFailure = True
963 ))
964
965 factory.addStep(ShellCommand(
966 name = "pkgindex",
967 description = "Indexing packages",
968 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
969 env = MakeEnv(),
970 haltOnFailure = True
971 ))
972
973 factory.addStep(ShellCommand(
974 name = "images",
975 description = "Building and installing images",
976 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
977 env = MakeEnv(),
978 haltOnFailure = True
979 ))
980
981 factory.addStep(ShellCommand(
982 name = "buildinfo",
983 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
984 command = "make -j1 buildinfo V=s || true",
985 env = MakeEnv(),
986 haltOnFailure = True
987 ))
988
989 factory.addStep(ShellCommand(
990 name = "json_overview_image_info",
991 description = "Generate profiles.json in target folder",
992 command = "make -j1 json_overview_image_info V=s || true",
993 env = MakeEnv(),
994 haltOnFailure = True
995 ))
996
997 factory.addStep(ShellCommand(
998 name = "checksums",
999 description = "Calculating checksums",
1000 command=["make", "-j1", "checksum", "V=s"],
1001 env = MakeEnv(),
1002 haltOnFailure = True
1003 ))
1004
1005 if enable_kmod_archive:
1006 factory.addStep(ShellCommand(
1007 name = "kmoddir",
1008 description = "Creating kmod directory",
1009 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1010 haltOnFailure = True
1011 ))
1012
1013 factory.addStep(ShellCommand(
1014 name = "kmodprepare",
1015 description = "Preparing kmod archive",
1016 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1017 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1018 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1019 haltOnFailure = True
1020 ))
1021
1022 factory.addStep(ShellCommand(
1023 name = "kmodindex",
1024 description = "Indexing kmod archive",
1025 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1026 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1027 env = MakeEnv(),
1028 haltOnFailure = True
1029 ))
1030
1031 # sign
1032 if ini.has_option("gpg", "key") or usign_key is not None:
1033 factory.addStep(MasterShellCommand(
1034 name = "signprepare",
1035 description = "Preparing temporary signing directory",
1036 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1037 haltOnFailure = True
1038 ))
1039
1040 factory.addStep(ShellCommand(
1041 name = "signpack",
1042 description = "Packing files to sign",
1043 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1044 haltOnFailure = True
1045 ))
1046
1047 factory.addStep(FileUpload(
1048 workersrc = "sign.tar.gz",
1049 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1050 haltOnFailure = True
1051 ))
1052
1053 factory.addStep(MasterShellCommand(
1054 name = "signfiles",
1055 description = "Signing files",
1056 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
1057 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1058 haltOnFailure = True
1059 ))
1060
1061 factory.addStep(FileDownload(
1062 name = "dlsigntargz",
1063 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1064 workerdest = "sign.tar.gz",
1065 haltOnFailure = True
1066 ))
1067
1068 factory.addStep(ShellCommand(
1069 name = "signunpack",
1070 description = "Unpacking signed files",
1071 command = ["tar", "-xzf", "sign.tar.gz"],
1072 haltOnFailure = True
1073 ))
1074
1075 # upload
1076 factory.addStep(ShellCommand(
1077 name = "dirprepare",
1078 description = "Preparing upload directory structure",
1079 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1080 haltOnFailure = True
1081 ))
1082
1083 factory.addStep(ShellCommand(
1084 name = "linkprepare",
1085 description = "Preparing repository symlink",
1086 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1087 doStepIf = IsNoMasterBuild,
1088 haltOnFailure = True
1089 ))
1090
1091 if enable_kmod_archive:
1092 factory.addStep(ShellCommand(
1093 name = "kmoddirprepare",
1094 description = "Preparing kmod archive upload directory",
1095 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1096 haltOnFailure = True
1097 ))
1098
1099 factory.addStep(ShellCommand(
1100 name = "dirupload",
1101 description = "Uploading directory structure",
1102 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1103 env={'RSYNC_PASSWORD': rsync_bin_key},
1104 haltOnFailure = True,
1105 logEnviron = False,
1106 locks = NetLockUl,
1107 ))
1108
1109 # download remote sha256sums to 'target-sha256sums'
1110 factory.addStep(ShellCommand(
1111 name = "target-sha256sums",
1112 description = "Fetching remote sha256sums for target",
1113 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1114 env={'RSYNC_PASSWORD': rsync_bin_key},
1115 logEnviron = False,
1116 haltOnFailure = False,
1117 flunkOnFailure = False,
1118 warnOnFailure = False,
1119 ))
1120
1121 # build list of files to upload
1122 factory.addStep(FileDownload(
1123 name = "dlsha2rsyncpl",
1124 mastersrc = scripts_dir + '/sha2rsync.pl',
1125 workerdest = "../sha2rsync.pl",
1126 mode = 0o755,
1127 ))
1128
1129 factory.addStep(ShellCommand(
1130 name = "buildlist",
1131 description = "Building list of files to upload",
1132 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1133 haltOnFailure = True,
1134 ))
1135
1136 factory.addStep(FileDownload(
1137 name = "dlrsync.sh",
1138 mastersrc = scripts_dir + '/rsync.sh',
1139 workerdest = "../rsync.sh",
1140 mode = 0o755
1141 ))
1142
1143 # upload new files and update existing ones
1144 factory.addStep(ShellCommand(
1145 name = "targetupload",
1146 description = "Uploading target files",
1147 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1148 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1149 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1150 env={'RSYNC_PASSWORD': rsync_bin_key},
1151 haltOnFailure = True,
1152 logEnviron = False,
1153 ))
1154
1155 # delete files which don't exist locally
1156 factory.addStep(ShellCommand(
1157 name = "targetprune",
1158 description = "Pruning target files",
1159 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1160 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1161 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1162 env={'RSYNC_PASSWORD': rsync_bin_key},
1163 haltOnFailure = True,
1164 logEnviron = False,
1165 locks = NetLockUl,
1166 ))
1167
1168 if enable_kmod_archive:
1169 factory.addStep(ShellCommand(
1170 name = "kmodupload",
1171 description = "Uploading kmod archive",
1172 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1173 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1174 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1175 env={'RSYNC_PASSWORD': rsync_bin_key},
1176 haltOnFailure = True,
1177 logEnviron = False,
1178 locks = NetLockUl,
1179 ))
1180
1181 if rsync_src_url is not None:
1182 factory.addStep(ShellCommand(
1183 name = "sourcelist",
1184 description = "Finding source archives to upload",
1185 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1186 haltOnFailure = True
1187 ))
1188
1189 factory.addStep(ShellCommand(
1190 name = "sourceupload",
1191 description = "Uploading source archives",
1192 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1193 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1194 env={'RSYNC_PASSWORD': rsync_src_key},
1195 haltOnFailure = True,
1196 logEnviron = False,
1197 locks = NetLockUl,
1198 ))
1199
1200 factory.addStep(ShellCommand(
1201 name = "df",
1202 description = "Reporting disk usage",
1203 command=["df", "-h", "."],
1204 env={'LC_ALL': 'C'},
1205 haltOnFailure = False,
1206 flunkOnFailure = False,
1207 warnOnFailure = False,
1208 alwaysRun = True
1209 ))
1210
1211 factory.addStep(ShellCommand(
1212 name = "du",
1213 description = "Reporting estimated file space usage",
1214 command=["du", "-sh", "."],
1215 env={'LC_ALL': 'C'},
1216 haltOnFailure = False,
1217 flunkOnFailure = False,
1218 warnOnFailure = False,
1219 alwaysRun = True
1220 ))
1221
1222 factory.addStep(ShellCommand(
1223 name = "ccachestat",
1224 description = "Reporting ccache stats",
1225 command=["ccache", "-s"],
1226 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1227 want_stderr = False,
1228 haltOnFailure = False,
1229 flunkOnFailure = False,
1230 warnOnFailure = False,
1231 alwaysRun = True,
1232 ))
1233
1234 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1235
1236 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1237 force_factory.addStep(steps.Trigger(
1238 name = "trigger_%s" % target,
1239 description = "Triggering %s build" % target,
1240 schedulerNames = [ "trigger_%s" % target ],
1241 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1242 doStepIf = IsTargetSelected(target)
1243 ))
1244
1245
1246 ####### STATUS TARGETS
1247
1248 # 'status' is a list of Status Targets. The results of each build will be
1249 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1250 # including web pages, email senders, and IRC bots.
1251
if "status_bind" in inip1:
    # A bind address/port is configured, so expose the buildbot web UI
    # with the standard build-overview plugins enabled.
    c['www'] = {
        'port': inip1.get("status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True,
        },
    }

    # Credentials are optional; when both are present, gate all control
    # endpoints behind a single "admins" role tied to that user.
    if "status_user" in inip1 and "status_password" in inip1:
        admin_user = inip1.get("status_user")
        c['www']['auth'] = util.UserPasswordAuth([
            (admin_user, inip1.get("status_password")),
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[util.AnyControlEndpointMatcher(role="admins")],
            roleMatchers=[util.RolesFromUsername(roles=["admins"], usernames=[admin_user])],
        )
1270
c['services'] = []

if ini.has_section("irc"):
    # Pull the IRC reporter settings from the [irc] section of config.ini.
    iniirc = ini['irc']
    irc_host = iniirc.get("host", None)
    irc_port = iniirc.getint("port", 6667)
    irc_chan = iniirc.get("channel", None)
    irc_nick = iniirc.get("nickname", None)
    irc_pass = iniirc.get("password", None)

    # Host, nickname and channel are mandatory; port and password have
    # sensible fallbacks (6667 / no auth).
    if irc_host and irc_nick and irc_chan:
        c['services'].append(
            reporters.IRC(
                irc_host,
                irc_nick,
                port=irc_port,
                password=irc_pass,
                channels=[irc_chan],
                # Keep the channel quiet: only announce breakage and recovery.
                notify_events=['exception', 'problem', 'recovery'],
            )
        )
1289
# Map commit hashes from the upstream git repos to their gitweb commit pages.
c['revlink'] = util.RevlinkMatch(
    [r'https://git.openwrt.org/openwrt/(.*).git'],
    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s',
)
1294
####### DB URL

# Backing store for buildbot state. The SQLite default is fine for all but
# the largest installations.
c['db'] = {
    'db_url': "sqlite:///state.sqlite",
}

# Opt out of the anonymous usage-data phone-home.
c['buildbotNetUsageData'] = None