1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41 from buildbot.worker.local import LocalWorker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("Fix your configuration")
56
57 inip1 = ini['phase1']
58
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini['repo'].get("url")
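# For reference, a minimal config.ini sketch covering the sections read above
# (key names follow this file; all values are purely illustrative):
#
#   [general]
#   title = Example buildbot
#   title_url = https://buildbot.example.org/
#   workdir = /buildbot
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010
#   port = 9989
#
#   [repo]
#   url = https://git.example.org/project.git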
64
65 rsync_defopts = ["-v", "--timeout=120"]
66
67 # if a branch binary_url uses the rsync protocol ("host::module" or "rsync://..."),
68 # consider adding "--contimeout=20" to rsync_defopts
69
70 branches = {}
71
72 def ini_parse_branch(section):
73 b = {}
74 name = section.get("name")
75
76 if not name:
77 raise ValueError("missing 'name' in " + repr(section))
78 if name in branches:
79 raise ValueError("duplicate branch name in " + repr(section))
80
81 b["name"] = name
82 b["bin_url"] = section.get("binary_url")
83 b["bin_key"] = section.get("binary_password")
84
85 b["src_url"] = section.get("source_url")
86 b["src_key"] = section.get("source_password")
87
88 b["gpg_key"] = section.get("gpg_key")
89
90 b["usign_key"] = section.get("usign_key")
91 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
92 b["usign_comment"] = section.get("usign_comment", usign_comment)
93
94 b["config_seed"] = section.get("config_seed")
95
96 b["kmod_archive"] = section.getboolean("kmod_archive", False)
97
98 branches[name] = b
99 log.msg("Configured branch: {}".format(name))
100
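# Illustrative [branch ...] section consumed by ini_parse_branch() above
# (values are placeholders, not real settings):
#
#   [branch main]
#   name = main
#   source_url = rsync://example.org/sources
#   source_password = secret
#   binary_url = rsync://example.org/binaries
#   binary_password = secret
#   usign_key = <base64-encoded usign secret key>
#   config_seed = CONFIG_DEVEL=y
#   kmod_archive = true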
101 # PB port can be either a numeric port or a connection string
102 pb_port = inip1.get("port") or 9989
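# e.g. a plain port number such as 9989, or a Twisted strports string like
# "tcp:9989:interface=127.0.0.1" (illustrative values)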
103
104 # This is the dictionary that the buildmaster pays attention to. We also use
105 # a shorter alias to save typing.
106 c = BuildmasterConfig = {}
107
108 ####### PROJECT IDENTITY
109
110 # the 'title' string will appear at the top of this buildbot
111 # installation's html.WebStatus home page (linked to the
112 # 'titleURL') and is embedded in the title of the waterfall HTML page.
113
114 c['title'] = ini['general'].get("title")
115 c['titleURL'] = ini['general'].get("title_url")
116
117 # the 'buildbotURL' string should point to the location where the buildbot's
118 # internal web server (usually the html.WebStatus page) is visible. This
119 # typically uses the port number set in the Waterfall 'status' entry, but
120 # with an externally-visible host name which the buildbot cannot figure out
121 # without some help.
122
123 c['buildbotURL'] = inip1.get("buildbot_url")
124
125 ####### BUILDWORKERS
126
127 # The 'workers' list defines the set of recognized buildworkers. Each element is
128 # a Worker object, specifying a unique worker name and password. The same
129 # worker name and password must be configured on the worker.
130
131 c['workers'] = []
132 NetLocks = dict()
133
134 def ini_parse_workers(section):
135 name = section.get("name")
136 password = section.get("password")
137 phase = section.getint("phase")
138 tagonly = section.getboolean("tag_only")
139 rsyncipv4 = section.getboolean("rsync_ipv4")
140
141 if not name or not password or phase != 1:
142 log.msg("invalid worker configuration ignored: {}".format(repr(section)))
143 return
144
145 sl_props = { 'tag_only':tagonly }
146 if "dl_lock" in section:
147 lockname = section.get("dl_lock")
148 sl_props['dl_lock'] = lockname
149 if lockname not in NetLocks:
150 NetLocks[lockname] = locks.MasterLock(lockname)
151 if "ul_lock" in section:
152 lockname = section.get("ul_lock")
153 sl_props['ul_lock'] = lockname
154 if lockname not in NetLocks:
155 NetLocks[lockname] = locks.MasterLock(lockname)
156 if rsyncipv4:
157 sl_props['rsync_ipv4'] = True # only set the prop when required; the '+' Interpolate substitution keys off its presence
158
159 log.msg("Configured worker: {}".format(name))
160 # NB: phase1 build factory requires workers to be single-build only
161 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
162
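# Illustrative [worker ...] section consumed by ini_parse_workers() above
# (only phase = 1 workers are accepted here; values are placeholders):
#
#   [worker builder1]
#   name = builder1
#   password = secret
#   phase = 1
#   tag_only = false
#   rsync_ipv4 = true
#   dl_lock = dl-mirror-1
#   ul_lock = ul-mirror-1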
163
164 for section in ini.sections():
165 if section.startswith("branch "):
166 ini_parse_branch(ini[section])
167
168 if section.startswith("worker "):
169 ini_parse_workers(ini[section])
170
171 # list of branches in build-priority order
172 branchNames = [branches[b]["name"] for b in branches]
173
174 c['protocols'] = {'pb': {'port': pb_port}}
175
176 # coalesce builds
177 c['collapseRequests'] = True
178
179 # Reduce amount of backlog data
180 c['configurators'] = [util.JanitorConfigurator(
181 logHorizon=timedelta(days=3),
182 hour=6,
183 )]
184
185 @defer.inlineCallbacks
186 def getNewestCompleteTime(bldr):
187 """Returns the complete_at of the latest completed and not SKIPPED
188 build request for this builder, or None if there are no such build
189 requests. We need to filter out SKIPPED requests because we're
190 using collapseRequests=True, which unfortunately marks all
191 previous requests as complete when a new buildset is created.
192
193 @returns: datetime instance or None, via Deferred
194 """
195
196 bldrid = yield bldr.getBuilderId()
197 completed = yield bldr.master.data.get(
198 ('builders', bldrid, 'buildrequests'),
199 [
200 resultspec.Filter('complete', 'eq', [True]),
201 resultspec.Filter('results', 'ne', [results.SKIPPED]),
202 ],
203 order=['-complete_at'], limit=1)
204 if not completed:
205 return
206
207 complete_at = completed[0]['complete_at']
208
209 last_build = yield bldr.master.data.get(
210 ('builds', ),
211 [
212 resultspec.Filter('builderid', 'eq', [bldrid]),
213 ],
214 order=['-started_at'], limit=1)
215
216 if last_build and last_build[0]:
217 last_complete_at = last_build[0]['complete_at']
218 if last_complete_at and (last_complete_at > complete_at):
219 return last_complete_at
220
221 return complete_at
222
223 @defer.inlineCallbacks
224 def prioritizeBuilders(master, builders):
225 """Returns sorted list of builders by their last timestamp of completed and
226 not skipped build, ordered first by branch name.
227
228 @returns: list of sorted builders
229 """
230
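# Sketch of the resulting order: the force-build and Janitor builders sort first
# (priority 0), then each configured branch in turn; within a branch, builders
# that never completed a build come first, then oldest complete_at, while
# currently-building builders sort last.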
231 bldrNamePrio = { "__Janitor": 0, "00_force_build": 0 }
232 i = 1
233 for bname in branchNames:
234 bldrNamePrio[bname] = i
235 i += 1
236
237 def is_building(bldr):
238 return bool(bldr.building) or bool(bldr.old_building)
239
240 def bldr_info(bldr):
241 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
242 d.addCallback(lambda complete_at: (complete_at, bldr))
243 return d
244
245 def bldr_sort(item):
246 (complete_at, bldr) = item
247
248 pos = 99
249 for (name, prio) in bldrNamePrio.items():
250 if bldr.name.startswith(name):
251 pos = prio
252 break
253
254 if not complete_at:
255 date = datetime.min
256 complete_at = date.replace(tzinfo=tzutc())
257
258 if is_building(bldr):
259 date = datetime.max
260 complete_at = date.replace(tzinfo=tzutc())
261
262 return (pos, complete_at, bldr.name)
263
264 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
265 results.sort(key=bldr_sort)
266
267 #for r in results:
268 # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
269
270 return [r[1] for r in results]
271
272 c['prioritizeBuilders'] = prioritizeBuilders
273
274 ####### CHANGESOURCES
275
276 # find targets
277 targets = set()
278
279 def populateTargets():
280 """ fetch a shallow clone of each configured branch in turn:
281 execute dump-target-info.pl and collate the results to ensure
282 targets that only exist in specific branches get built.
283 This takes a while during master startup but is executed only once.
284 """
285 log.msg("Populating targets, this will take time")
286 sourcegit = work_dir + '/source.git'
287 for branch in branchNames:
288 if os.path.isdir(sourcegit):
289 subprocess.call(["rm", "-rf", sourcegit])
290
291 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
292
293 os.makedirs(sourcegit + '/tmp', exist_ok=True)
294 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
295 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
296
297 while True:
298 line = findtargets.stdout.readline()
299 if not line:
300 break
301 ta = line.decode().strip().split(' ')
302 targets.add(ta[0])
303
304 subprocess.call(["rm", "-rf", sourcegit])
305
306 populateTargets()
307
308 # the 'change_source' setting tells the buildmaster how it should find out
309 # about source code changes.
310
311 c['change_source'] = []
312 c['change_source'].append(GitPoller(
313 repo_url,
314 workdir=work_dir+'/work.git', branches=branchNames,
315 pollAtLaunch=True, pollinterval=300))
316
317 ####### SCHEDULERS
318
319 # Configure the Schedulers, which decide how to react to incoming changes.
320
321 # Selector for known valid tags
322 class TagChoiceParameter(BaseParameter):
323 spec_attributes = ["strict", "choices"]
324 type = "list"
325 strict = True
326
327 def __init__(self, name, label=None, **kw):
328 super().__init__(name, label, **kw)
329 self._choice_list = []
330
331 def getRevTags(self, findtag=None):
332 taglist = []
333 branchvers = []
334
335 # we will filter out tags that do not match the configured branches
336 for b in branchNames:
337 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
338 if basever:
339 branchvers.append(basever[1])
340
341 # grab tags from remote repository
342 alltags = subprocess.Popen(
343 ['git', 'ls-remote', '--tags', repo_url],
344 stdout = subprocess.PIPE)
345
346 while True:
347 line = alltags.stdout.readline()
348
349 if not line:
350 break
351
352 (rev, tag) = line.split()
353
354 # does it match known format? ('vNN.NN.NN(-rcN)')
355 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
356
357 # only list valid tags matching configured branches
358 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
359 # if we want a specific tag, ignore all that don't match
360 if findtag and findtag != tagver[1]:
361 continue
362 taglist.append({'rev': rev.decode().strip(), 'tag': tagver[1]})
363
364 return taglist
365
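# e.g. a remote ref "refs/tags/v23.05.2" (illustrative) is accepted only if some
# configured branch name ends in "-23.05", and is then offered as a choice.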
366 @property
367 def choices(self):
368 taglist = [rt['tag'] for rt in self.getRevTags()]
369 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
370 taglist.insert(0, '')
371
372 self._choice_list = taglist
373
374 return self._choice_list
375
376 def updateFromKwargs(self, properties, kwargs, **unused):
377 tag = self.getFromKwargs(kwargs)
378 properties[self.name] = tag
379
380 # find the commit matching the tag
381 findtag = self.getRevTags(tag)
382
383 if not findtag:
384 raise ValidationError("Couldn't find tag")
385
386 properties['force_revision'] = findtag[0]['rev']
387
388 # find the branch matching the tag
389 branch = None
390 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
391 for b in branchNames:
392 if b.endswith(branchver[1]):
393 branch = b
394
395 if not branch:
396 raise ValidationError("Couldn't find branch")
397
398 properties['force_branch'] = branch
399
400 def parse_from_arg(self, s):
401 if self.strict and s not in self._choice_list:
402 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
403 return s
404
405 @util.renderer
406 @defer.inlineCallbacks
407 def builderNames(props):
408 """ since we have per branch and per target builders,
409 address the relevant builder for each new buildrequest
410 based on the request's desired branch and target.
411 """
412 branch = props.getProperty("branch")
413 target = props.getProperty("target", "")
414
415 if target == "all":
416 target = ""
417
418 # if that didn't work, try sourcestamp to find a branch
419 if not branch:
420 # match builders with target branch
421 ss = props.sourcestamps[0]
422 if ss:
423 branch = ss['branch']
424 else:
425 log.msg("couldn't find builder")
426 return [] # nothing works
427
428 bname = branch + "_" + target
429 builders = []
430
431 for b in (yield props.master.data.get(('builders',))):
432 if not b['name'].startswith(bname):
433 continue
434 builders.append(b['name'])
435
436 return builders
437
438 c['schedulers'] = []
439 c['schedulers'].append(AnyBranchScheduler(
440 name = "all",
441 change_filter = util.ChangeFilter(branch=branchNames),
442 treeStableTimer = 15*60,
443 builderNames = builderNames))
444
445 c['schedulers'].append(ForceScheduler(
446 name = "force",
447 buttonName = "Force builds",
448 label = "Force build details",
449 builderNames = [ "00_force_build" ],
450
451 codebases = [
452 util.CodebaseParameter(
453 "",
454 label = "Repository",
455 branch = util.FixedParameter(name = "branch", default = ""),
456 revision = util.FixedParameter(name = "revision", default = ""),
457 repository = util.FixedParameter(name = "repository", default = ""),
458 project = util.FixedParameter(name = "project", default = "")
459 )
460 ],
461
462 reason = util.StringParameter(
463 name = "reason",
464 label = "Reason",
465 default = "Trigger build",
466 required = True,
467 size = 80
468 ),
469
470 properties = [
471 # NB: avoid nesting to simplify processing of properties
472 util.ChoiceStringParameter(
473 name = "target",
474 label = "Build target",
475 default = "all",
476 choices = [ "all" ] + list(targets)
477 ),
478 TagChoiceParameter(
479 name = "tag",
480 label = "Build tag",
481 default = ""
482 )
483 ]
484 ))
485
486 c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=builderNames))
487
488 ####### BUILDERS
489
490 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
491 # what steps, and which workers can execute them. Note that any particular build will
492 # only take place on one worker.
493
494 def IsNoMasterBuild(step):
495 return step.getProperty("branch") != "master"
496
497 def IsUsignEnabled(step):
498 branch = step.getProperty("branch")
499 return branch and branches[branch].get("usign_key")
500
501 def IsSignEnabled(step):
502 branch = step.getProperty("branch")
503 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
504
505 def IsKmodArchiveEnabled(step):
506 branch = step.getProperty("branch")
507 return branch and branches[branch].get("kmod_archive")
508
509 def IsKmodArchiveAndRsyncEnabled(step):
510 branch = step.getProperty("branch")
511 return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url"))
512
513 def GetBaseVersion(branch):
514 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
515 return branch.split('-')[1]
516 else:
517 return "master"
518
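# e.g. GetBaseVersion("openwrt-23.05") == "23.05", while any branch name not
# matching the <name>-NN.NN pattern (such as "main") yields "master"
# (branch names are illustrative).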
519 @properties.renderer
520 def GetVersionPrefix(props):
521 branch = props.getProperty("branch")
522 basever = GetBaseVersion(branch)
523 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
524 return "%s/" % props["tag"][1:]
525 elif basever != "master":
526 return "%s-SNAPSHOT/" % basever
527 else:
528 return ""
529
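# e.g. (illustrative) a forced build of tag "v23.05.2" uploads under "23.05.2/",
# an untagged build on an "openwrt-23.05" branch under "23.05-SNAPSHOT/", and a
# master build uses an empty prefix, so files land directly under targets/.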
530 @util.renderer
531 def GetConfigSeed(props):
532 branch = props.getProperty("branch")
533 return branch and branches[branch].get("config_seed") or ""
534
535 @util.renderer
536 def GetRsyncParams(props, srcorbin, urlorkey):
537 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
538 branch = props.getProperty("branch")
539 opt = srcorbin + "_" + urlorkey
540 return branch and branches[branch].get(opt)
541
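# e.g. GetRsyncParams.withArgs("bin", "url") renders to the branch's binary_url
# setting and GetRsyncParams.withArgs("src", "key") to its source_password.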
542 @util.renderer
543 def GetUsignKey(props):
544 branch = props.getProperty("branch")
545 return branch and branches[branch].get("usign_key")
546
547 def GetNextBuild(builder, requests):
548 for r in requests:
549 if r.properties:
550 # order tagged builds first
551 if r.properties.hasProperty("tag"):
552 return r
553
554 r = requests[0]
555 #log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
556 return r
557
558 def MakeEnv(overrides=None, tryccache=False):
559 env = {
560 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
561 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
562 }
563 if tryccache:
564 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
565 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
566 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
567 else:
568 env['CC'] = env['CCC']
569 env['CXX'] = env['CCXX']
570 env['CCACHE'] = ''
571 if overrides is not None:
572 env.update(overrides)
573 return env
574
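# Sketch of the result (property values are illustrative): with tryccache=True
# and ccache available, CC/CXX point at the generated ../ccache_cc.sh and
# ../ccache_cxx.sh wrappers, which exec "$CCACHE $CCC ..."; otherwise CC/CXX are
# the plain cc_command and cxx_command detected by the findbin.pl steps below.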
575 @properties.renderer
576 def NetLockDl(props, extralock=None):
577 lock = None
578 if props.hasProperty("dl_lock"):
579 lock = NetLocks[props["dl_lock"]]
580 if lock is not None:
581 return [lock.access('exclusive')]
582 else:
583 return []
584
585 @properties.renderer
586 def NetLockUl(props):
587 lock = None
588 if props.hasProperty("ul_lock"):
589 lock = NetLocks[props["ul_lock"]]
590 if lock is not None:
591 return [lock.access('exclusive')]
592 else:
593 return []
594
595 def IsTargetSelected(target):
596 def CheckTargetProperty(step):
597 selected_target = step.getProperty("target", "all")
598 if selected_target != "all" and selected_target != target:
599 return False
600 return True
601
602 return CheckTargetProperty
603
604 @util.renderer
605 def UsignSec2Pub(props):
606 branch = props.getProperty("branch")
607 try:
608 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
609 seckey = branches[branch].get("usign_key")
610 seckey = base64.b64decode(seckey)
611 except Exception:
612 return None
613
614 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
615 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
616
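# Assumed layout of the decoded usign/signify secret key driving the slicing
# above: pkalg(2) + kdfalg(2) + kdfrounds(4) + salt(16) + checksum(8) +
# keynum(8) + seckey(64); the public key is pkalg + keynum + the final 32 bytes
# of the Ed25519 secret key, which is what seckey[0:2] + seckey[32:40] +
# seckey[72:] reassembles.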
617
618 def canStartBuild(builder, wfb, request):
619 """ filter out non tag requests for tag_only workers. """
620 wtagonly = wfb.worker.properties.getProperty('tag_only')
621 tag = request.properties.getProperty('tag')
622
623 if wtagonly and not tag:
624 return False
625
626 return True
627
628 c['builders'] = []
629
630 workerNames = [ ]
631
632 for worker in c['workers']:
633 workerNames.append(worker.workername)
634
635 # add a single LocalWorker to handle the forcebuild builder
636 c['workers'].append(LocalWorker("__local_force_build", max_builds=1))
637
638 force_factory = BuildFactory()
639 force_factory.addStep(steps.Trigger(
640 name = "trigger_build",
641 schedulerNames = [ "trigger" ],
642 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
643 set_properties = { "reason": Property("reason"), "tag": Property("tag"), "target": Property("target") },
644 ))
645
646 c['builders'].append(BuilderConfig(
647 name = "00_force_build",
648 workername = "__local_force_build",
649 factory = force_factory))
650
651
652 # NB the phase1 build factory assumes workers are single-build only
653 for target in targets:
654 ts = target.split('/')
655
656 factory = BuildFactory()
657
658 # setup shared work directory if required
659 factory.addStep(ShellCommand(
660 name = "sharedwd",
661 descriptionDone = "Shared work directory set up",
662 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
663 workdir = ".",
664 haltOnFailure = True,
665 ))
666
667 # find number of cores
668 factory.addStep(SetPropertyFromCommand(
669 name = "nproc",
670 property = "nproc",
671 description = "Finding number of CPUs",
672 command = ["nproc"],
673 ))
674
675 # find gcc and g++ compilers
676 factory.addStep(FileDownload(
677 name = "dlfindbinpl",
678 mastersrc = scripts_dir + '/findbin.pl',
679 workerdest = "../findbin.pl",
680 mode = 0o755,
681 ))
682
683 factory.addStep(SetPropertyFromCommand(
684 name = "gcc",
685 property = "cc_command",
686 description = "Finding gcc command",
687 command = ["../findbin.pl", "gcc", "", ""],
688 haltOnFailure = True,
689 ))
690
691 factory.addStep(SetPropertyFromCommand(
692 name = "g++",
693 property = "cxx_command",
694 description = "Finding g++ command",
695 command = ["../findbin.pl", "g++", "", ""],
696 haltOnFailure = True,
697 ))
698
699 # see if ccache is available
700 factory.addStep(SetPropertyFromCommand(
701 name = "ccache",
702 property = "ccache_command",
703 description = "Testing for ccache command",
704 command = ["which", "ccache"],
705 haltOnFailure = False,
706 flunkOnFailure = False,
707 warnOnFailure = False,
708 hideStepIf = lambda r, s: r==results.FAILURE,
709 ))
710
711 # check out the source
712 # Git() runs:
713 # if repo doesn't exist: 'git clone repourl'
714 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
715 # git cat-file -e <commit>
716 # git checkout -f <commit>
717 # git checkout -B <branch>
718 # git rev-parse HEAD
719 factory.addStep(Git(
720 name = "git",
721 repourl = repo_url,
722 mode = 'full',
723 method = 'fresh',
724 locks = NetLockDl,
725 haltOnFailure = True,
726 ))
727
728 # workaround for https://github.com/openwrt/buildbot/issues/5
729 factory.addStep(Git(
730 name = "git me once more please",
731 repourl = repo_url,
732 mode = 'full',
733 method = 'fresh',
734 locks = NetLockDl,
735 haltOnFailure = True,
736 ))
737
738 # update remote refs
739 factory.addStep(ShellCommand(
740 name = "fetchrefs",
741 description = "Fetching Git remote refs",
742 descriptionDone = "Git remote refs fetched",
743 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
744 haltOnFailure = True,
745 ))
746
747 # Verify that Git HEAD points to a tag or branch
748 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
749 factory.addStep(ShellCommand(
750 name = "gitverify",
751 description = "Ensuring that Git HEAD is pointing to a branch or tag",
752 descriptionDone = "Git HEAD is sane",
753 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
754 haltOnFailure = True,
755 ))
756
757 factory.addStep(StringDownload(
758 name = "ccachecc",
759 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
760 workerdest = "../ccache_cc.sh",
761 mode = 0o755,
762 ))
763
764 factory.addStep(StringDownload(
765 name = "ccachecxx",
766 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
767 workerdest = "../ccache_cxx.sh",
768 mode = 0o755,
769 ))
770
771 # feed
772 factory.addStep(ShellCommand(
773 name = "updatefeeds",
774 description = "Updating feeds",
775 command=["./scripts/feeds", "update"],
776 env = MakeEnv(tryccache=True),
777 haltOnFailure = True,
778 locks = NetLockDl,
779 ))
780
781 # feed
782 factory.addStep(ShellCommand(
783 name = "installfeeds",
784 description = "Installing feeds",
785 command=["./scripts/feeds", "install", "-a"],
786 env = MakeEnv(tryccache=True),
787 haltOnFailure = True,
788 ))
789
790 # seed config
791 factory.addStep(StringDownload(
792 name = "dlconfigseed",
793 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
794 workerdest = ".config",
795 mode = 0o644,
796 ))
797
798 # configure
799 factory.addStep(ShellCommand(
800 name = "newconfig",
801 descriptionDone = ".config seeded",
802 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
803 ))
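# With an illustrative target of ath79/generic and usign enabled, the lines
# appended above would be:
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y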
804
805 factory.addStep(ShellCommand(
806 name = "defconfig",
807 description = "Populating .config",
808 command = ["make", "defconfig"],
809 env = MakeEnv(),
810 ))
811
812 # check arch - exit early if it does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
813 factory.addStep(ShellCommand(
814 name = "checkarch",
815 description = "Checking architecture",
816 descriptionDone = "Architecture validated",
817 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
818 logEnviron = False,
819 want_stdout = False,
820 want_stderr = False,
821 haltOnFailure = True,
822 flunkOnFailure = False, # this is not a build FAILURE - TODO mark build as SKIPPED
823 ))
824
825 # find libc suffix
826 factory.addStep(SetPropertyFromCommand(
827 name = "libc",
828 property = "libc",
829 description = "Finding libc suffix",
830 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
831 ))
832
833 # install build key
834 factory.addStep(StringDownload(
835 name = "dlkeybuildpub",
836 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
837 workerdest = "key-build.pub",
838 mode = 0o600,
839 doStepIf = IsUsignEnabled,
840 ))
841
842 factory.addStep(StringDownload(
843 name = "dlkeybuild",
844 s = "# fake private key",
845 workerdest = "key-build",
846 mode = 0o600,
847 doStepIf = IsUsignEnabled,
848 ))
849
850 factory.addStep(StringDownload(
851 name = "dlkeybuilducert",
852 s = "# fake certificate",
853 workerdest = "key-build.ucert",
854 mode = 0o600,
855 doStepIf = IsUsignEnabled,
856 ))
857
858 # prepare dl
859 factory.addStep(ShellCommand(
860 name = "dldir",
861 description = "Preparing dl/",
862 descriptionDone = "dl/ prepared",
863 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
864 workdir = Property("builddir"),
865 logEnviron = False,
866 want_stdout = False,
867 ))
868
869 # cleanup dl
870 factory.addStep(ShellCommand(
871 name = "dlprune",
872 description = "Pruning dl/",
873 descriptionDone = "dl/ pruned",
874 command = 'find dl/ -atime +15 -delete -print',
875 logEnviron = False,
876 ))
877
878 # prepare tar
879 factory.addStep(ShellCommand(
880 name = "dltar",
881 description = "Building and installing GNU tar",
882 descriptionDone = "GNU tar built and installed",
883 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
884 env = MakeEnv(tryccache=True),
885 haltOnFailure = True,
886 ))
887
888 # populate dl
889 factory.addStep(ShellCommand(
890 name = "dlrun",
891 description = "Populating dl/",
892 descriptionDone = "dl/ populated",
893 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
894 env = MakeEnv(),
895 logEnviron = False,
896 locks = NetLockDl,
897 ))
898
899 factory.addStep(ShellCommand(
900 name = "cleanbase",
901 description = "Cleaning base-files",
902 command=["make", "package/base-files/clean", "V=s"],
903 ))
904
905 # build
906 factory.addStep(ShellCommand(
907 name = "tools",
908 description = "Building and installing tools",
909 descriptionDone = "Tools built and installed",
910 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
911 env = MakeEnv(tryccache=True),
912 haltOnFailure = True,
913 ))
914
915 factory.addStep(ShellCommand(
916 name = "toolchain",
917 description = "Building and installing toolchain",
918 descriptionDone = "Toolchain built and installed",
919 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
920 env = MakeEnv(),
921 haltOnFailure = True,
922 ))
923
924 factory.addStep(ShellCommand(
925 name = "kmods",
926 description = "Building kmods",
927 descriptionDone = "Kmods built",
928 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
929 env = MakeEnv(),
930 haltOnFailure = True,
931 ))
932
933 # find kernel version
934 factory.addStep(SetPropertyFromCommand(
935 name = "kernelversion",
936 property = "kernelversion",
937 description = "Finding the effective Kernel version",
938 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
939 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
940 ))
941
942 factory.addStep(ShellCommand(
943 name = "pkgclean",
944 description = "Cleaning up package build",
945 descriptionDone = "Package build cleaned up",
946 command=["make", "package/cleanup", "V=s"],
947 ))
948
949 factory.addStep(ShellCommand(
950 name = "pkgbuild",
951 description = "Building packages",
952 descriptionDone = "Packages built",
953 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
954 env = MakeEnv(),
955 haltOnFailure = True,
956 ))
957
958 factory.addStep(ShellCommand(
959 name = "pkginstall",
960 description = "Installing packages",
961 descriptionDone = "Packages installed",
962 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
963 env = MakeEnv(),
964 haltOnFailure = True,
965 ))
966
967 factory.addStep(ShellCommand(
968 name = "pkgindex",
969 description = "Indexing packages",
970 descriptionDone = "Packages indexed",
971 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
972 env = MakeEnv(),
973 haltOnFailure = True,
974 ))
975
976 factory.addStep(ShellCommand(
977 name = "images",
978 description = "Building and installing images",
979 descriptionDone = "Images built and installed",
980 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
981 env = MakeEnv(),
982 haltOnFailure = True,
983 ))
984
985 factory.addStep(ShellCommand(
986 name = "buildinfo",
987 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
988 command = "make -j1 buildinfo V=s || true",
989 env = MakeEnv(),
990 haltOnFailure = True,
991 ))
992
993 factory.addStep(ShellCommand(
994 name = "json_overview_image_info",
995 description = "Generating profiles.json in target folder",
996 command = "make -j1 json_overview_image_info V=s || true",
997 env = MakeEnv(),
998 haltOnFailure = True,
999 ))
1000
1001 factory.addStep(ShellCommand(
1002 name = "checksums",
1003 description = "Calculating checksums",
1004 descriptionDone = "Checksums calculated",
1005 command=["make", "-j1", "checksum", "V=s"],
1006 env = MakeEnv(),
1007 haltOnFailure = True,
1008 ))
1009
1010 factory.addStep(ShellCommand(
1011 name = "kmoddir",
1012 descriptionDone = "Kmod directory created",
1013 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1014 haltOnFailure = True,
1015 doStepIf = IsKmodArchiveEnabled,
1016 ))
1017
1018 factory.addStep(ShellCommand(
1019 name = "kmodprepare",
1020 description = "Preparing kmod archive",
1021 descriptionDone = "Kmod archive prepared",
1022 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1023 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1024 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1025 haltOnFailure = True,
1026 doStepIf = IsKmodArchiveEnabled,
1027 ))
1028
1029 factory.addStep(ShellCommand(
1030 name = "kmodindex",
1031 description = "Indexing kmod archive",
1032 descriptionDone = "Kmod archive indexed",
1033 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1034 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1035 env = MakeEnv(),
1036 haltOnFailure = True,
1037 doStepIf = IsKmodArchiveEnabled,
1038 ))
1039
1040 # sign
1041 factory.addStep(MasterShellCommand(
1042 name = "signprepare",
1043 descriptionDone = "Temporary signing directory prepared",
1044 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1045 haltOnFailure = True,
1046 doStepIf = IsSignEnabled,
1047
1048 ))
1049
1050 factory.addStep(ShellCommand(
1051 name = "signpack",
1052 description = "Packing files to sign",
1053 descriptionDone = "Files to sign packed",
1054 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1055 haltOnFailure = True,
1056 doStepIf = IsSignEnabled,
1057 ))
1058
1059 factory.addStep(FileUpload(
1060 workersrc = "sign.tar.gz",
1061 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1062 haltOnFailure = True,
1063 doStepIf = IsSignEnabled,
1064 ))
1065
1066 factory.addStep(MasterShellCommand(
1067 name = "signfiles",
1068 description = "Signing files",
1069 descriptionDone = "Files signed",
1070 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
1071 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1072 haltOnFailure = True,
1073 doStepIf = IsSignEnabled,
1074 ))
1075
1076 factory.addStep(FileDownload(
1077 name = "dlsigntargz",
1078 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1079 workerdest = "sign.tar.gz",
1080 haltOnFailure = True,
1081 doStepIf = IsSignEnabled,
1082 ))
1083
1084 factory.addStep(ShellCommand(
1085 name = "signunpack",
1086 description = "Unpacking signed files",
1087 descriptionDone = "Signed files unpacked",
1088 command = ["tar", "-xzf", "sign.tar.gz"],
1089 haltOnFailure = True,
1090 doStepIf = IsSignEnabled,
1091 ))
1092
1093 # upload
1094 factory.addStep(ShellCommand(
1095 name = "dirprepare",
1096 descriptionDone = "Upload directory structure prepared",
1097 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1098 haltOnFailure = True,
1099 ))
1100
1101 factory.addStep(ShellCommand(
1102 name = "linkprepare",
1103 descriptionDone = "Repository symlink prepared",
1104 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1105 doStepIf = IsNoMasterBuild,
1106 haltOnFailure = True,
1107 ))
1108
1109 factory.addStep(ShellCommand(
1110 name = "kmoddirprepare",
1111 descriptionDone = "Kmod archive upload directory prepared",
1112 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1113 haltOnFailure = True,
1114 doStepIf = IsKmodArchiveEnabled,
1115 ))
1116
1117 factory.addStep(ShellCommand(
1118 name = "dirupload",
1119 description = "Uploading directory structure",
1120 descriptionDone = "Directory structure uploaded",
1121 command = ["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1122 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1123 haltOnFailure = True,
1124 logEnviron = False,
1125 locks = NetLockUl,
1126 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1127 ))
1128
1129 # download remote sha256sums to 'target-sha256sums'
1130 factory.addStep(ShellCommand(
1131 name = "target-sha256sums",
1132 description = "Fetching remote sha256sums for target",
1133 descriptionDone = "Remote sha256sums for target fetched",
1134 command = ["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1135 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1136 logEnviron = False,
1137 haltOnFailure = False,
1138 flunkOnFailure = False,
1139 warnOnFailure = False,
1140 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1141 ))
1142
1143 # build list of files to upload
1144 factory.addStep(FileDownload(
1145 name = "dlsha2rsyncpl",
1146 mastersrc = scripts_dir + '/sha2rsync.pl',
1147 workerdest = "../sha2rsync.pl",
1148 mode = 0o755,
1149 ))
1150
1151 factory.addStep(ShellCommand(
1152 name = "buildlist",
1153 description = "Building list of files to upload",
1154 descriptionDone = "List of files to upload built",
1155 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1156 haltOnFailure = True,
1157 ))
1158
1159 factory.addStep(FileDownload(
1160 name = "dlrsync.sh",
1161 mastersrc = scripts_dir + '/rsync.sh',
1162 workerdest = "../rsync.sh",
1163 mode = 0o755,
1164 ))
1165
1166 # upload new files and update existing ones
1167 factory.addStep(ShellCommand(
1168 name = "targetupload",
1169 description = "Uploading target files",
1170 descriptionDone = "Target files uploaded",
1171 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1172 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1173 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1174 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1175 haltOnFailure = True,
1176 logEnviron = False,
1177 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1178 ))
1179
1180 # delete files which don't exist locally
1181 factory.addStep(ShellCommand(
1182 name = "targetprune",
1183 description = "Pruning target files",
1184 descriptionDone = "Target files pruned",
1185 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1186 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1187 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1188 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1189 haltOnFailure = True,
1190 logEnviron = False,
1191 locks = NetLockUl,
1192 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1193 ))
1194
1195 factory.addStep(ShellCommand(
1196 name = "kmodupload",
1197 description = "Uploading kmod archive",
1198 descriptionDone = "Kmod archive uploaded",
1199 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1200 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1201 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1202 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1203 haltOnFailure = True,
1204 logEnviron = False,
1205 locks = NetLockUl,
1206 doStepIf = IsKmodArchiveAndRsyncEnabled,
1207 ))
1208
1209 factory.addStep(ShellCommand(
1210 name = "sourcelist",
1211 description = "Finding source archives to upload",
1212 descriptionDone = "Source archives to upload found",
1213 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1214 haltOnFailure = True,
1215 ))
1216
1217 factory.addStep(ShellCommand(
1218 name = "sourceupload",
1219 description = "Uploading source archives",
1220 descriptionDone = "Source archives uploaded",
1221 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1222 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), Interpolate("-a%(prop:rsync_ipv4:+4)s"), "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1223 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1224 haltOnFailure = True,
1225 logEnviron = False,
1226 locks = NetLockUl,
1227 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1228 ))
1229
1230 factory.addStep(ShellCommand(
1231 name = "df",
1232 description = "Reporting disk usage",
1233 command=["df", "-h", "."],
1234 env={'LC_ALL': 'C'},
1235 logEnviron = False,
1236 haltOnFailure = False,
1237 flunkOnFailure = False,
1238 warnOnFailure = False,
1239 alwaysRun = True,
1240 ))
1241
1242 factory.addStep(ShellCommand(
1243 name = "du",
1244 description = "Reporting estimated file space usage",
1245 command=["du", "-sh", "."],
1246 env={'LC_ALL': 'C'},
1247 logEnviron = False,
1248 haltOnFailure = False,
1249 flunkOnFailure = False,
1250 warnOnFailure = False,
1251 alwaysRun = True,
1252 ))
1253
1254 factory.addStep(ShellCommand(
1255 name = "ccachestat",
1256 description = "Reporting ccache stats",
1257 command=["ccache", "-s"],
1258 logEnviron = False,
1259 want_stderr = False,
1260 haltOnFailure = False,
1261 flunkOnFailure = False,
1262 warnOnFailure = False,
1263 doStepIf = util.Transform(bool, Property("ccache_command")),
1264 ))
1265
1266 for brname in branchNames:
1267 bldrname = brname + "_" + target
1268 c['builders'].append(BuilderConfig(name=bldrname, workernames=workerNames, factory=factory, tags=[brname,], nextBuild=GetNextBuild, canStartBuild=canStartBuild))
1269
1270
1271 ####### STATUS TARGETS
1272
1273 # 'status' is a list of Status Targets. The results of each build will be
1274 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1275 # including web pages, email senders, and IRC bots.
1276
1277 if "status_bind" in inip1:
1278 c['www'] = {
1279 'port': inip1.get("status_bind"),
1280 'plugins': {
1281 'waterfall_view': True,
1282 'console_view': True,
1283 'grid_view': True
1284 }
1285 }
1286
1287 if "status_user" in inip1 and "status_password" in inip1:
1288 c['www']['auth'] = util.UserPasswordAuth([
1289 (inip1.get("status_user"), inip1.get("status_password"))
1290 ])
1291 c['www']['authz'] = util.Authz(
1292 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1293 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1294 )
1295
1296 c['services'] = []
1297 if ini.has_section("irc"):
1298 iniirc = ini['irc']
1299 irc_host = iniirc.get("host", None)
1300 irc_port = iniirc.getint("port", 6667)
1301 irc_chan = iniirc.get("channel", None)
1302 irc_nick = iniirc.get("nickname", None)
1303 irc_pass = iniirc.get("password", None)
1304
1305 if irc_host and irc_nick and irc_chan:
1306 irc = reporters.IRC(irc_host, irc_nick,
1307 port = irc_port,
1308 password = irc_pass,
1309 channels = [ irc_chan ],
1310 notify_events = [ 'exception', 'problem', 'recovery' ]
1311 )
1312
1313 c['services'].append(irc)
1314
1315 c['revlink'] = util.RevlinkMatch([
1316 r'https://git.openwrt.org/openwrt/(.*).git'
1317 ],
1318 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1319
1320 ####### DB URL
1321
1322 c['db'] = {
1323 # This specifies what database buildbot uses to store its state. You can leave
1324 # this at its default for all but the largest installations.
1325 'db_url' : "sqlite:///state.sqlite",
1326 }
1327
1328 c['buildbotNetUsageData'] = None