phase1: actually make rsync -4 configurable
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41 from buildbot.worker.local import LocalWorker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is the phase1 buildmaster config file. It must be installed as
49 # 'master.cfg' in the buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("Fix your configuration")
56
57 inip1 = ini['phase1']
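# A hedged sketch of the config.ini layout this file expects; the section and
# key names below mirror the get() calls throughout this file, and all values
# are illustrative placeholders:
#
#   [general]
#   title = Example Project
#   title_url = https://example.org/
#   workdir = /data/buildbot
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010:interface=127.0.0.1
#   status_user = admin
#   status_password = secret
#   port = 9989
#
#   [repo]
#   url = https://git.example.org/openwrt/openwrt.git
#
#   [branch main]
#   name = master
#   binary_url = rsync://example.org/bin
#   binary_password = secret
#   source_url = rsync://example.org/src
#   source_password = secret
#   usign_key = <base64 usign secret key>
#   config_seed = CONFIG_BUILDBOT=y
#   kmod_archive = yes
#
#   [worker 1]
#   name = worker-01
#   password = secret
#   phase = 1
#   tag_only = no
#   rsync_ipv4 = yes
#
#   [irc]
#   host = irc.example.org
#   channel = #builds
#   nickname = builderbot
#
# Any section whose name starts with "branch " or "worker " is parsed by the
# ini_parse_branch() / ini_parse_workers() helpers below.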
58
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini['repo'].get("url")
64
65 rsync_defopts = ["-v", "--timeout=120"]
66
67 # Note: for rsync-daemon style URLs ("host::module" or "rsync://..."), an extra
68 # "--contimeout=20" could be appended to rsync_defopts here.
69
70 branches = {}
71
72 def ini_parse_branch(section):
73 b = {}
74 name = section.get("name")
75
76 if not name:
77 raise ValueError("missing 'name' in " + repr(section))
78 if name in branches:
79 raise ValueError("duplicate branch name in " + repr(section))
80
81 b["name"] = name
82 b["bin_url"] = section.get("binary_url")
83 b["bin_key"] = section.get("binary_password")
84
85 b["src_url"] = section.get("source_url")
86 b["src_key"] = section.get("source_password")
87
88 b["gpg_key"] = section.get("gpg_key")
89
90 b["usign_key"] = section.get("usign_key")
91 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
92 b["usign_comment"] = section.get("usign_comment", usign_comment)
93
94 b["config_seed"] = section.get("config_seed")
95
96 b["kmod_archive"] = section.getboolean("kmod_archive", False)
97
98 branches[name] = b
99 log.msg("Configured branch: {}".format(name))
100
101 # PB port can be either a numeric port or a connection string
102 pb_port = inip1.get("port") or 9989
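# e.g. a plain port number such as 9989, or (presumably) any Twisted
# strports-style string Buildbot accepts, such as "tcp:9989:interface=127.0.0.1".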
103
104 # This is the dictionary that the buildmaster pays attention to. We also use
105 # a shorter alias to save typing.
106 c = BuildmasterConfig = {}
107
108 ####### PROJECT IDENTITY
109
110 # the 'title' string will appear at the top of this buildbot
111 # installation's web UI home page (linked to the
112 # 'titleURL') and is embedded in the title of the waterfall page.
113
114 c['title'] = ini['general'].get("title")
115 c['titleURL'] = ini['general'].get("title_url")
116
117 # the 'buildbotURL' string should point to the location where the buildbot's
118 # internal web server (the www UI configured below) is visible. This
119 # typically uses the port number set in 'status_bind', but with an
120 # externally-visible host name which the buildbot cannot figure out
121 # without some help.
122
123 c['buildbotURL'] = inip1.get("buildbot_url")
124
125 ####### BUILDWORKERS
126
127 # The 'workers' list defines the set of recognized build workers. Each element is
128 # a Worker object, specifying a unique worker name and password. The same
129 # worker name and password must be configured on the worker.
130
131 c['workers'] = []
132 NetLocks = dict()
133
134 def ini_parse_workers(section):
135 name = section.get("name")
136 password = section.get("password")
137 phase = section.getint("phase")
138 tagonly = section.getboolean("tag_only")
139 rsyncipv4 = section.getboolean("rsync_ipv4")
140
141 if not name or not password or phase != 1:
142 log.msg("invalid worker configuration ignored: {}".format(repr(section)))
143 return
144
145 sl_props = { 'tag_only':tagonly }
146 if "dl_lock" in section:
147 lockname = section.get("dl_lock")
148 sl_props['dl_lock'] = lockname
149 if lockname not in NetLocks:
150 NetLocks[lockname] = locks.MasterLock(lockname)
151 if "ul_lock" in section:
152 lockname = section.get("ul_lock")
153 sl_props['ul_lock'] = lockname
154 if lockname not in NetLocks:
155 NetLocks[lockname] = locks.MasterLock(lockname)
156 if rsyncipv4:
157 sl_props['rsync_ipv4'] = True # only set the property when required; the rsync steps rely on '+' Interpolate substitution
158
159 log.msg("Configured worker: {}".format(name))
160 # NB: phase1 build factory requires workers to be single-build only
161 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
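# How the rsync_ipv4 property is consumed further down (sketch): the rsync
# steps interpolate "-az%(prop:rsync_ipv4:+4)s" (or "-z..."), which renders as
# "-az4" (forcing rsync over IPv4) when the worker sets rsync_ipv4, and as
# plain "-az" when the property is absent.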
162
163
164 for section in ini.sections():
165 if section.startswith("branch "):
166 ini_parse_branch(ini[section])
167
168 if section.startswith("worker "):
169 ini_parse_workers(ini[section])
170
171 # list of branches in build-priority order
172 branchNames = [branches[b]["name"] for b in branches]
173
174 c['protocols'] = {'pb': {'port': pb_port}}
175
176 # coalesce builds
177 c['collapseRequests'] = True
178
179 # Reduce amount of backlog data
180 c['configurators'] = [util.JanitorConfigurator(
181 logHorizon=timedelta(days=3),
182 hour=6,
183 )]
184
185 @defer.inlineCallbacks
186 def getNewestCompleteTime(bldr):
187 """Returns the complete_at of the latest completed and not SKIPPED
188 build request for this builder, or None if there are no such build
189 requests. We need to filter out SKIPPED requests because we're
190 using collapseRequests=True, which unfortunately marks all
191 previous requests as complete when a new buildset is created.
192
193 @returns: datetime instance or None, via Deferred
194 """
195
196 bldrid = yield bldr.getBuilderId()
197 completed = yield bldr.master.data.get(
198 ('builders', bldrid, 'buildrequests'),
199 [
200 resultspec.Filter('complete', 'eq', [True]),
201 resultspec.Filter('results', 'ne', [results.SKIPPED]),
202 ],
203 order=['-complete_at'], limit=1)
204 if not completed:
205 return
206
207 complete_at = completed[0]['complete_at']
208
209 last_build = yield bldr.master.data.get(
210 ('builds', ),
211 [
212 resultspec.Filter('builderid', 'eq', [bldrid]),
213 ],
214 order=['-started_at'], limit=1)
215
216 if last_build and last_build[0]:
217 last_complete_at = last_build[0]['complete_at']
218 if last_complete_at and (last_complete_at > complete_at):
219 return last_complete_at
220
221 return complete_at
222
223 @defer.inlineCallbacks
224 def prioritizeBuilders(master, builders):
225 """Returns sorted list of builders by their last timestamp of completed and
226 not skipped build, ordered first by branch name.
227
228 @returns: list of sorted builders
229 """
230
231 bldrNamePrio = { "__Janitor": 0, "00_force_build": 0 }
232 i = 1
233 for bname in branchNames:
234 bldrNamePrio[bname] = i
235 i += 1
236
237 def is_building(bldr):
238 return bool(bldr.building) or bool(bldr.old_building)
239
240 def bldr_info(bldr):
241 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
242 d.addCallback(lambda complete_at: (complete_at, bldr))
243 return d
244
245 def bldr_sort(item):
246 (complete_at, bldr) = item
247
248 pos = 99
249 for (name, prio) in bldrNamePrio.items():
250 if bldr.name.startswith(name):
251 pos = prio
252 break
253
254 if not complete_at:
255 date = datetime.min
256 complete_at = date.replace(tzinfo=tzutc())
257
258 if is_building(bldr):
259 date = datetime.max
260 complete_at = date.replace(tzinfo=tzutc())
261
262 return (pos, complete_at, bldr.name)
263
264 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
265 results.sort(key=bldr_sort)
266
267 #for r in results:
268 # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
269
270 return [r[1] for r in results]
271
272 c['prioritizeBuilders'] = prioritizeBuilders
273
274 ####### CHANGESOURCES
275
276 # find targets
277 targets = set()
278
279 def populateTargets():
280 """ fetch a shallow clone of each configured branch in turn:
281 execute dump-target-info.pl and collate the results to ensure
282 targets that only exist in specific branches get built.
283 This takes a while during master startup but is executed only once.
284 """
285 log.msg("Populating targets, this will take time")
286 sourcegit = work_dir + '/source.git'
287 for branch in branchNames:
288 if os.path.isdir(sourcegit):
289 subprocess.call(["rm", "-rf", sourcegit])
290
291 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
292
293 os.makedirs(sourcegit + '/tmp', exist_ok=True)
294 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
295 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
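# Each output line of dump-target-info.pl is expected to start with the
# "target/subtarget" tuple (e.g. "ath79/generic", an illustrative name); only
# that first whitespace-separated token is collected below.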
296
297 while True:
298 line = findtargets.stdout.readline()
299 if not line:
300 break
301 ta = line.decode().strip().split(' ')
302 targets.add(ta[0])
303
304 subprocess.call(["rm", "-rf", sourcegit])
305
306 populateTargets()
307
308 # the 'change_source' setting tells the buildmaster how it should find out
309 # about source code changes.
310
311 c['change_source'] = []
312 c['change_source'].append(GitPoller(
313 repo_url,
314 workdir=work_dir+'/work.git', branches=branchNames,
315 pollAtLaunch=True, pollinterval=300))
316
317 ####### SCHEDULERS
318
319 # Configure the Schedulers, which decide how to react to incoming changes.
320
321 # Selector for known valid tags
322 class TagChoiceParameter(BaseParameter):
323 spec_attributes = ["strict", "choices"]
324 type = "list"
325 strict = True
326
327 def __init__(self, name, label=None, **kw):
328 super().__init__(name, label, **kw)
329 self._choice_list = []
330
331 def getRevTags(self, findtag=None):
332 taglist = []
333 branchvers = []
334
335 # we will filter out tags that do not match the configured branches
336 for b in branchNames:
337 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
338 if basever:
339 branchvers.append(basever[1])
340
341 # grab tags from remote repository
342 alltags = subprocess.Popen(
343 ['git', 'ls-remote', '--tags', repo_url],
344 stdout = subprocess.PIPE)
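# Each line printed by `git ls-remote --tags` has the form "<sha1>\t<ref>",
# e.g. "0123abc... refs/tags/v21.02.1" (illustrative); peeled "^{}" entries for
# annotated tags do not match the anchored regex below and are skipped.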
345
346 while True:
347 line = alltags.stdout.readline()
348
349 if not line:
350 break
351
352 (rev, tag) = line.split()
353
354 # does it match known format? ('vNN.NN.NN(-rcN)')
355 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
356
357 # only list valid tags matching configured branches
358 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
359 # if we want a specific tag, ignore all that don't match
360 if findtag and findtag != tagver[1]:
361 continue
362 taglist.append({'rev': rev.decode().strip(), 'tag': tagver[1]})
363
364 return taglist
365
366 @property
367 def choices(self):
368 taglist = [rt['tag'] for rt in self.getRevTags()]
369 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
370 taglist.insert(0, '')
371
372 self._choice_list = taglist
373
374 return self._choice_list
375
376 def updateFromKwargs(self, properties, kwargs, **unused):
377 tag = self.getFromKwargs(kwargs)
378 properties[self.name] = tag
379
380 # find the commit matching the tag
381 findtag = self.getRevTags(tag)
382
383 if not findtag:
384 raise ValidationError("Couldn't find tag")
385
386 properties['force_revision'] = findtag[0]['rev']
387
388 # find the branch matching the tag
389 branch = None
390 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
391 for b in branchNames:
392 if b.endswith(branchver[1]):
393 branch = b
394
395 if not branch:
396 raise ValidationError("Couldn't find branch")
397
398 properties['force_branch'] = branch
399
400 def parse_from_arg(self, s):
401 if self.strict and s not in self._choice_list:
402 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
403 return s
404
405 @util.renderer
406 @defer.inlineCallbacks
407 def builderNames(props):
408 """ since we have per branch and per target builders,
409 address the relevant builder for each new buildrequest
410 based on the request's desired branch and target.
411 """
412 branch = props.getProperty("branch")
413 target = props.getProperty("target", "")
414
415 if target == "all":
416 target = ""
417
418 # if the branch property is unset, fall back to the sourcestamp to find a branch
419 if not branch:
420 # match builders with target branch
421 ss = props.sourcestamps[0]
422 if ss:
423 branch = ss['branch']
424 else:
425 log.msg("couldn't find builder")
426 return [] # nothing works
427
428 bname = branch + "_" + target
429 builders = []
430
431 for b in (yield props.master.data.get(('builders',))):
432 if not b['name'].startswith(bname):
433 continue
434 builders.append(b['name'])
435
436 return builders
437
438 c['schedulers'] = []
439 c['schedulers'].append(AnyBranchScheduler(
440 name = "all",
441 change_filter = util.ChangeFilter(branch=branchNames),
442 treeStableTimer = 15*60,
443 builderNames = builderNames))
444
445 c['schedulers'].append(ForceScheduler(
446 name = "force",
447 buttonName = "Force builds",
448 label = "Force build details",
449 builderNames = [ "00_force_build" ],
450
451 codebases = [
452 util.CodebaseParameter(
453 "",
454 label = "Repository",
455 branch = util.FixedParameter(name = "branch", default = ""),
456 revision = util.FixedParameter(name = "revision", default = ""),
457 repository = util.FixedParameter(name = "repository", default = ""),
458 project = util.FixedParameter(name = "project", default = "")
459 )
460 ],
461
462 reason = util.StringParameter(
463 name = "reason",
464 label = "Reason",
465 default = "Trigger build",
466 required = True,
467 size = 80
468 ),
469
470 properties = [
471 # NB: avoid nesting to simplify processing of properties
472 util.ChoiceStringParameter(
473 name = "target",
474 label = "Build target",
475 default = "all",
476 choices = [ "all" ] + list(targets)
477 ),
478 TagChoiceParameter(
479 name = "tag",
480 label = "Build tag",
481 default = ""
482 )
483 ]
484 ))
485
486 c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=builderNames))
487
488 ####### BUILDERS
489
490 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
491 # what steps, and which workers can execute them. Note that any particular build will
492 # only take place on one worker.
493
494 def IsNoMasterBuild(step):
495 return step.getProperty("branch") != "master"
496
497 def IsUsignEnabled(step):
498 branch = step.getProperty("branch")
499 return branch and branches[branch].get("usign_key")
500
501 def IsSignEnabled(step):
502 branch = step.getProperty("branch")
503 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
504
505 def IsKmodArchiveEnabled(step):
506 branch = step.getProperty("branch")
507 return branch and branches[branch].get("kmod_archive")
508
509 def IsKmodArchiveAndRsyncEnabled(step):
510 branch = step.getProperty("branch")
511 return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url"))
512
513 def GetBaseVersion(branch):
514 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
515 return branch.split('-')[1]
516 else:
517 return "master"
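# e.g. GetBaseVersion("openwrt-21.02") returns "21.02" (illustrative branch
# name); anything not matching the "<name>-<major>.<minor>" pattern, including
# "master", falls through to "master".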
518
519 @properties.renderer
520 def GetVersionPrefix(props):
521 branch = props.getProperty("branch")
522 basever = GetBaseVersion(branch)
523 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
524 return "%s/" % props["tag"][1:]
525 elif basever != "master":
526 return "%s-SNAPSHOT/" % basever
527 else:
528 return ""
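# Illustrative renderings (branch and tag names are hypothetical): a forced
# build of tag "v21.02.1" gives "21.02.1/", a snapshot build on branch
# "openwrt-21.02" gives "21.02-SNAPSHOT/", and master builds give "" (no prefix).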
529
530 @util.renderer
531 def GetConfigSeed(props):
532 branch = props.getProperty("branch")
533 return branch and branches[branch].get("config_seed") or ""
534
535 @util.renderer
536 def GetRsyncParams(props, srcorbin, urlorkey):
537 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
538 branch = props.getProperty("branch")
539 opt = srcorbin + "_" + urlorkey
540 return branch and branches[branch].get(opt)
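# Used below as e.g. GetRsyncParams.withArgs("bin", "url"): withArgs() binds
# the extra srcorbin/urlorkey arguments so the renderer yields the per-branch
# binary_url/binary_password or source_url/source_password setting at build time.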
541
542 @util.renderer
543 def GetUsignKey(props):
544 branch = props.getProperty("branch")
545 return branch and branches[branch].get("usign_key")
546
547 def GetNextBuild(builder, requests):
548 for r in requests:
549 if r.properties:
550 # order tagged builds first
551 if r.properties.hasProperty("tag"):
552 return r
553
554 r = requests[0]
555 #log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
556 return r
557
558 def MakeEnv(overrides=None, tryccache=False):
559 env = {
560 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
561 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
562 }
563 if tryccache:
564 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
565 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
566 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
567 else:
568 env['CC'] = env['CCC']
569 env['CXX'] = env['CCXX']
570 env['CCACHE'] = ''
571 if overrides is not None:
572 env.update(overrides)
573 return env
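# Sketch of the result: with tryccache=True, CC/CXX point at the
# ccache_cc.sh/ccache_cxx.sh wrapper scripts installed further below (which
# exec "${CCACHE} ${CCC}" and "${CCACHE} ${CCXX}"), while CCC/CCXX always hold
# the compilers discovered by the findbin.pl steps; without ccache the wrappers
# are bypassed and CC/CXX are the compilers themselves.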
574
575 @properties.renderer
576 def NetLockDl(props, extralock=None):
577 lock = None
578 if props.hasProperty("dl_lock"):
579 lock = NetLocks[props["dl_lock"]]
580 if lock is not None:
581 return [lock.access('exclusive')]
582 else:
583 return []
584
585 @properties.renderer
586 def NetLockUl(props):
587 lock = None
588 if props.hasProperty("ul_lock"):
589 lock = NetLocks[props["ul_lock"]]
590 if lock is not None:
591 return [lock.access('exclusive')]
592 else:
593 return []
594
595 def IsTargetSelected(target):
596 def CheckTargetProperty(step):
597 selected_target = step.getProperty("target", "all")
598 if selected_target != "all" and selected_target != target:
599 return False
600 return True
601
602 return CheckTargetProperty
603
604 @util.renderer
605 def UsignSec2Pub(props):
606 branch = props.getProperty("branch")
607 try:
608 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
609 seckey = branches[branch].get("usign_key")
610 seckey = base64.b64decode(seckey)
611 except:
612 return None
613
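# A usign/signify secret key decodes to pkalg(2) + kdfalg(2) + kdfrounds(4) +
# salt(16) + checksum(8) + keynum(8) + seckey(64), and the last 32 bytes of the
# Ed25519 seckey are the public key; the slices below therefore reassemble the
# public key blob as pkalg + keynum + pubkey.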
614 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
615 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
616
617
618 def canStartBuild(builder, wfb, request):
619 """ filter out non tag requests for tag_only workers. """
620 wtagonly = wfb.worker.properties.getProperty('tag_only')
621 tag = request.properties.getProperty('tag')
622
623 if wtagonly and not tag:
624 return False
625
626 return True
627
628 c['builders'] = []
629
630 workerNames = [ ]
631
632 for worker in c['workers']:
633 workerNames.append(worker.workername)
634
635 # add a single LocalWorker to handle the forcebuild builder
636 c['workers'].append(LocalWorker("__local_force_build", max_builds=1))
637
638 force_factory = BuildFactory()
639 force_factory.addStep(steps.Trigger(
640 name = "trigger_build",
641 schedulerNames = [ "trigger" ],
642 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
643 set_properties = { "reason": Property("reason"), "tag": Property("tag"), "target": Property("target") },
644 ))
645
646 c['builders'].append(BuilderConfig(
647 name = "00_force_build",
648 workername = "__local_force_build",
649 factory = force_factory))
650
651
652 # NB the phase1 build factory assumes workers are single-build only
653 for target in targets:
654 ts = target.split('/')
655
656 factory = BuildFactory()
657
658 # setup shared work directory if required
659 factory.addStep(ShellCommand(
660 name = "sharedwd",
661 descriptionDone = "Shared work directory set up",
662 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
663 workdir = ".",
664 haltOnFailure = True,
665 ))
666
667 # find number of cores
668 factory.addStep(SetPropertyFromCommand(
669 name = "nproc",
670 property = "nproc",
671 description = "Finding number of CPUs",
672 command = ["nproc"],
673 ))
674
675 # find gcc and g++ compilers
676 factory.addStep(FileDownload(
677 name = "dlfindbinpl",
678 mastersrc = scripts_dir + '/findbin.pl',
679 workerdest = "../findbin.pl",
680 mode = 0o755,
681 ))
682
683 factory.addStep(SetPropertyFromCommand(
684 name = "gcc",
685 property = "cc_command",
686 description = "Finding gcc command",
687 command = ["../findbin.pl", "gcc", "", ""],
688 haltOnFailure = True,
689 ))
690
691 factory.addStep(SetPropertyFromCommand(
692 name = "g++",
693 property = "cxx_command",
694 description = "Finding g++ command",
695 command = ["../findbin.pl", "g++", "", ""],
696 haltOnFailure = True,
697 ))
698
699 # see if ccache is available
700 factory.addStep(SetPropertyFromCommand(
701 name = "ccache",
702 property = "ccache_command",
703 description = "Testing for ccache command",
704 command = ["which", "ccache"],
705 haltOnFailure = False,
706 flunkOnFailure = False,
707 warnOnFailure = False,
708 hideStepIf = lambda r, s: r==results.FAILURE,
709 ))
710
711 # check out the source
712 # Git() runs:
713 # if repo doesn't exist: 'git clone repourl'
714 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -f -f -d -x'; both only apply with mode='full'
715 # git cat-file -e <commit>
716 # git checkout -f <commit>
717 # git checkout -B <branch>
718 # git rev-parse HEAD
719 factory.addStep(Git(
720 name = "git",
721 repourl = repo_url,
722 mode = 'full',
723 method = 'fresh',
724 locks = NetLockDl,
725 haltOnFailure = True,
726 ))
727
728 # update remote refs
729 factory.addStep(ShellCommand(
730 name = "fetchrefs",
731 description = "Fetching Git remote refs",
732 descriptionDone = "Git remote refs fetched",
733 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
734 haltOnFailure = True,
735 ))
736
737 # Verify that Git HEAD points to a tag or branch
738 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
739 factory.addStep(ShellCommand(
740 name = "gitverify",
741 description = "Ensuring that Git HEAD is pointing to a branch or tag",
742 descriptionDone = "Git HEAD is sane",
743 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
744 haltOnFailure = True,
745 ))
746
747 factory.addStep(ShellCommand(
748 name = "rmtmp",
749 description = "Remove tmp folder",
750 command=["rm", "-rf", "tmp/"],
751 ))
752
753 # feed
754 factory.addStep(ShellCommand(
755 name = "rmfeedlinks",
756 description = "Remove feed symlinks",
757 command=["rm", "-rf", "package/feeds/"],
758 ))
759
760 factory.addStep(StringDownload(
761 name = "ccachecc",
762 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
763 workerdest = "../ccache_cc.sh",
764 mode = 0o755,
765 ))
766
767 factory.addStep(StringDownload(
768 name = "ccachecxx",
769 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
770 workerdest = "../ccache_cxx.sh",
771 mode = 0o755,
772 ))
773
774 # feed
775 factory.addStep(ShellCommand(
776 name = "updatefeeds",
777 description = "Updating feeds",
778 command=["./scripts/feeds", "update"],
779 env = MakeEnv(tryccache=True),
780 haltOnFailure = True,
781 locks = NetLockDl,
782 ))
783
784 # feed
785 factory.addStep(ShellCommand(
786 name = "installfeeds",
787 description = "Installing feeds",
788 command=["./scripts/feeds", "install", "-a"],
789 env = MakeEnv(tryccache=True),
790 haltOnFailure = True,
791 ))
792
793 # seed config
794 factory.addStep(StringDownload(
795 name = "dlconfigseed",
796 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
797 workerdest = ".config",
798 mode = 0o644,
799 ))
800
801 # configure
802 factory.addStep(ShellCommand(
803 name = "newconfig",
804 descriptionDone = ".config seeded",
805 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
806 ))
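# e.g. for a hypothetical target "ath79/generic" with a usign key configured,
# the step above appends:
#   CONFIG_TARGET_ath79=y
#   CONFIG_TARGET_ath79_generic=y
#   CONFIG_SIGNED_PACKAGES=y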
807
808 factory.addStep(ShellCommand(
809 name = "delbin",
810 description = "Removing output directory",
811 command = ["rm", "-rf", "bin/"],
812 ))
813
814 factory.addStep(ShellCommand(
815 name = "defconfig",
816 description = "Populating .config",
817 command = ["make", "defconfig"],
818 env = MakeEnv(),
819 ))
820
821 # check arch - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
822 factory.addStep(ShellCommand(
823 name = "checkarch",
824 description = "Checking architecture",
825 descriptionDone = "Architecture validated",
826 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
827 logEnviron = False,
828 want_stdout = False,
829 want_stderr = False,
830 haltOnFailure = True,
831 flunkOnFailure = False, # this is not a build FAILURE - TODO mark build as SKIPPED
832 ))
833
834 # find libc suffix
835 factory.addStep(SetPropertyFromCommand(
836 name = "libc",
837 property = "libc",
838 description = "Finding libc suffix",
839 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
840 ))
841
842 # install build key
843 factory.addStep(StringDownload(
844 name = "dlkeybuildpub",
845 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
846 workerdest = "key-build.pub",
847 mode = 0o600,
848 doStepIf = IsUsignEnabled,
849 ))
850
851 factory.addStep(StringDownload(
852 name = "dlkeybuild",
853 s = "# fake private key",
854 workerdest = "key-build",
855 mode = 0o600,
856 doStepIf = IsUsignEnabled,
857 ))
858
859 factory.addStep(StringDownload(
860 name = "dlkeybuilducert",
861 s = "# fake certificate",
862 workerdest = "key-build.ucert",
863 mode = 0o600,
864 doStepIf = IsUsignEnabled,
865 ))
866
867 # prepare dl
868 factory.addStep(ShellCommand(
869 name = "dldir",
870 description = "Preparing dl/",
871 descriptionDone = "dl/ prepared",
872 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
873 workdir = Property("builddir"),
874 logEnviron = False,
875 want_stdout = False,
876 ))
877
878 # cleanup dl
879 factory.addStep(ShellCommand(
880 name = "dlprune",
881 description = "Pruning dl/",
882 descriptionDone = "dl/ pruned",
883 command = 'find dl/ -atime +15 -delete -print',
884 logEnviron = False,
885 ))
886
887 # prepare tar
888 factory.addStep(ShellCommand(
889 name = "dltar",
890 description = "Building and installing GNU tar",
891 descriptionDone = "GNU tar built and installed",
892 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
893 env = MakeEnv(tryccache=True),
894 haltOnFailure = True,
895 ))
896
897 # populate dl
898 factory.addStep(ShellCommand(
899 name = "dlrun",
900 description = "Populating dl/",
901 descriptionDone = "dl/ populated",
902 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
903 env = MakeEnv(),
904 logEnviron = False,
905 locks = NetLockDl,
906 ))
907
908 factory.addStep(ShellCommand(
909 name = "cleanbase",
910 description = "Cleaning base-files",
911 command=["make", "package/base-files/clean", "V=s"],
912 ))
913
914 # build
915 factory.addStep(ShellCommand(
916 name = "tools",
917 description = "Building and installing tools",
918 descriptionDone = "Tools built and installed",
919 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
920 env = MakeEnv(tryccache=True),
921 haltOnFailure = True,
922 ))
923
924 factory.addStep(ShellCommand(
925 name = "toolchain",
926 description = "Building and installing toolchain",
927 descriptionDone = "Toolchain built and installed",
928 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
929 env = MakeEnv(),
930 haltOnFailure = True,
931 ))
932
933 factory.addStep(ShellCommand(
934 name = "kmods",
935 description = "Building kmods",
936 descriptionDone = "Kmods built",
937 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
938 env = MakeEnv(),
939 haltOnFailure = True,
940 ))
941
942 # find kernel version
943 factory.addStep(SetPropertyFromCommand(
944 name = "kernelversion",
945 property = "kernelversion",
946 description = "Finding the effective Kernel version",
947 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
948 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
949 ))
950
951 factory.addStep(ShellCommand(
952 name = "pkgclean",
953 description = "Cleaning up package build",
954 descriptionDone = "Package build cleaned up",
955 command=["make", "package/cleanup", "V=s"],
956 ))
957
958 factory.addStep(ShellCommand(
959 name = "pkgbuild",
960 description = "Building packages",
961 descriptionDone = "Packages built",
962 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
963 env = MakeEnv(),
964 haltOnFailure = True,
965 ))
966
967 factory.addStep(ShellCommand(
968 name = "pkginstall",
969 description = "Installing packages",
970 descriptionDone = "Packages installed",
971 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
972 env = MakeEnv(),
973 haltOnFailure = True,
974 ))
975
976 factory.addStep(ShellCommand(
977 name = "pkgindex",
978 description = "Indexing packages",
979 descriptionDone = "Packages indexed",
980 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
981 env = MakeEnv(),
982 haltOnFailure = True,
983 ))
984
985 factory.addStep(ShellCommand(
986 name = "images",
987 description = "Building and installing images",
988 descriptionDone = "Images built and installed",
989 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
990 env = MakeEnv(),
991 haltOnFailure = True,
992 ))
993
994 factory.addStep(ShellCommand(
995 name = "buildinfo",
996 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
997 command = "make -j1 buildinfo V=s || true",
998 env = MakeEnv(),
999 haltOnFailure = True,
1000 ))
1001
1002 factory.addStep(ShellCommand(
1003 name = "json_overview_image_info",
1004 description = "Generating profiles.json in target folder",
1005 command = "make -j1 json_overview_image_info V=s || true",
1006 env = MakeEnv(),
1007 haltOnFailure = True,
1008 ))
1009
1010 factory.addStep(ShellCommand(
1011 name = "checksums",
1012 description = "Calculating checksums",
1013 descriptionDone = "Checksums calculated",
1014 command=["make", "-j1", "checksum", "V=s"],
1015 env = MakeEnv(),
1016 haltOnFailure = True,
1017 ))
1018
1019 factory.addStep(ShellCommand(
1020 name = "kmoddir",
1021 descriptionDone = "Kmod directory created",
1022 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1023 haltOnFailure = True,
1024 doStepIf = IsKmodArchiveEnabled,
1025 ))
1026
1027 factory.addStep(ShellCommand(
1028 name = "kmodprepare",
1029 description = "Preparing kmod archive",
1030 descriptionDone = "Kmod archive prepared",
1031 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1032 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1033 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1034 haltOnFailure = True,
1035 doStepIf = IsKmodArchiveEnabled,
1036 ))
1037
1038 factory.addStep(ShellCommand(
1039 name = "kmodindex",
1040 description = "Indexing kmod archive",
1041 descriptionDone = "Kmod archive indexed",
1042 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1043 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1044 env = MakeEnv(),
1045 haltOnFailure = True,
1046 doStepIf = IsKmodArchiveEnabled,
1047 ))
1048
1049 # sign
1050 factory.addStep(MasterShellCommand(
1051 name = "signprepare",
1052 descriptionDone = "Temporary signing directory prepared",
1053 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1054 haltOnFailure = True,
1055 doStepIf = IsSignEnabled,
1056
1057 ))
1058
1059 factory.addStep(ShellCommand(
1060 name = "signpack",
1061 description = "Packing files to sign",
1062 descriptionDone = "Files to sign packed",
1063 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1064 haltOnFailure = True,
1065 doStepIf = IsSignEnabled,
1066 ))
1067
1068 factory.addStep(FileUpload(
1069 workersrc = "sign.tar.gz",
1070 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1071 haltOnFailure = True,
1072 doStepIf = IsSignEnabled,
1073 ))
1074
1075 factory.addStep(MasterShellCommand(
1076 name = "signfiles",
1077 description = "Signing files",
1078 descriptionDone = "Files signed",
1079 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
1080 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1081 haltOnFailure = True,
1082 doStepIf = IsSignEnabled,
1083 ))
1084
1085 factory.addStep(FileDownload(
1086 name = "dlsigntargz",
1087 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1088 workerdest = "sign.tar.gz",
1089 haltOnFailure = True,
1090 doStepIf = IsSignEnabled,
1091 ))
1092
1093 factory.addStep(ShellCommand(
1094 name = "signunpack",
1095 description = "Unpacking signed files",
1096 descriptionDone = "Signed files unpacked",
1097 command = ["tar", "-xzf", "sign.tar.gz"],
1098 haltOnFailure = True,
1099 doStepIf = IsSignEnabled,
1100 ))
1101
1102 # upload
1103 factory.addStep(ShellCommand(
1104 name = "dirprepare",
1105 descriptionDone = "Upload directory structure prepared",
1106 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1107 haltOnFailure = True,
1108 ))
1109
1110 factory.addStep(ShellCommand(
1111 name = "linkprepare",
1112 descriptionDone = "Repository symlink prepared",
1113 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1114 doStepIf = IsNoMasterBuild,
1115 haltOnFailure = True,
1116 ))
1117
1118 factory.addStep(ShellCommand(
1119 name = "kmoddirprepare",
1120 descriptionDone = "Kmod archive upload directory prepared",
1121 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1122 haltOnFailure = True,
1123 doStepIf = IsKmodArchiveEnabled,
1124 ))
1125
1126 factory.addStep(ShellCommand(
1127 name = "dirupload",
1128 description = "Uploading directory structure",
1129 descriptionDone = "Directory structure uploaded",
1130 command = ["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1131 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1132 haltOnFailure = True,
1133 logEnviron = False,
1134 locks = NetLockUl,
1135 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1136 ))
1137
1138 # download remote sha256sums to 'target-sha256sums'
1139 factory.addStep(ShellCommand(
1140 name = "target-sha256sums",
1141 description = "Fetching remote sha256sums for target",
1142 descriptionDone = "Remote sha256sums for target fetched",
1143 command = ["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1144 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1145 logEnviron = False,
1146 haltOnFailure = False,
1147 flunkOnFailure = False,
1148 warnOnFailure = False,
1149 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1150 ))
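# Incremental upload flow used below (sketch): sha2rsync.pl compares the
# remote "target-sha256sums" fetched above against the locally generated
# sha256sums and writes the new or changed file names to "rsynclist"; the
# targetupload step then passes that list to rsync via --files-from so
# unchanged files are not re-transferred.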
1151
1152 # build list of files to upload
1153 factory.addStep(FileDownload(
1154 name = "dlsha2rsyncpl",
1155 mastersrc = scripts_dir + '/sha2rsync.pl',
1156 workerdest = "../sha2rsync.pl",
1157 mode = 0o755,
1158 ))
1159
1160 factory.addStep(ShellCommand(
1161 name = "buildlist",
1162 description = "Building list of files to upload",
1163 descriptionDone = "List of files to upload built",
1164 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1165 haltOnFailure = True,
1166 ))
1167
1168 factory.addStep(FileDownload(
1169 name = "dlrsync.sh",
1170 mastersrc = scripts_dir + '/rsync.sh',
1171 workerdest = "../rsync.sh",
1172 mode = 0o755,
1173 ))
1174
1175 # upload new files and update existing ones
1176 factory.addStep(ShellCommand(
1177 name = "targetupload",
1178 description = "Uploading target files",
1179 descriptionDone = "Target files uploaded",
1180 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1181 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1182 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1183 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1184 haltOnFailure = True,
1185 logEnviron = False,
1186 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1187 ))
1188
1189 # delete files which don't exist locally
1190 factory.addStep(ShellCommand(
1191 name = "targetprune",
1192 description = "Pruning target files",
1193 descriptionDone = "Target files pruned",
1194 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1195 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1196 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1197 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1198 haltOnFailure = True,
1199 logEnviron = False,
1200 locks = NetLockUl,
1201 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1202 ))
1203
1204 factory.addStep(ShellCommand(
1205 name = "kmodupload",
1206 description = "Uploading kmod archive",
1207 descriptionDone = "Kmod archive uploaded",
1208 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1209 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1210 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1211 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1212 haltOnFailure = True,
1213 logEnviron = False,
1214 locks = NetLockUl,
1215 doStepIf = IsKmodArchiveAndRsyncEnabled,
1216 ))
1217
1218 factory.addStep(ShellCommand(
1219 name = "sourcelist",
1220 description = "Finding source archives to upload",
1221 descriptionDone = "Source archives to upload found",
1222 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1223 haltOnFailure = True,
1224 ))
1225
1226 factory.addStep(ShellCommand(
1227 name = "sourceupload",
1228 description = "Uploading source archives",
1229 descriptionDone = "Source archives uploaded",
1230 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1231 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), Interpolate("-a%(prop:rsync_ipv4:+4)s"), "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1232 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1233 haltOnFailure = True,
1234 logEnviron = False,
1235 locks = NetLockUl,
1236 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1237 ))
1238
1239 factory.addStep(ShellCommand(
1240 name = "df",
1241 description = "Reporting disk usage",
1242 command=["df", "-h", "."],
1243 env={'LC_ALL': 'C'},
1244 logEnviron = False,
1245 haltOnFailure = False,
1246 flunkOnFailure = False,
1247 warnOnFailure = False,
1248 alwaysRun = True,
1249 ))
1250
1251 factory.addStep(ShellCommand(
1252 name = "du",
1253 description = "Reporting estimated file space usage",
1254 command=["du", "-sh", "."],
1255 env={'LC_ALL': 'C'},
1256 logEnviron = False,
1257 haltOnFailure = False,
1258 flunkOnFailure = False,
1259 warnOnFailure = False,
1260 alwaysRun = True,
1261 ))
1262
1263 factory.addStep(ShellCommand(
1264 name = "ccachestat",
1265 description = "Reporting ccache stats",
1266 command=["ccache", "-s"],
1267 logEnviron = False,
1268 want_stderr = False,
1269 haltOnFailure = False,
1270 flunkOnFailure = False,
1271 warnOnFailure = False,
1272 doStepIf = util.Transform(bool, Property("ccache_command")),
1273 ))
1274
1275 for brname in branchNames:
1276 bldrname = brname + "_" + target
1277 c['builders'].append(BuilderConfig(name=bldrname, workernames=workerNames, factory=factory, tags=[brname,], nextBuild=GetNextBuild, canStartBuild=canStartBuild))
1278
1279
1280 ####### STATUS TARGETS
1281
1282 # Build results are reported through the 'www' web UI and the reporters
1283 # registered in c['services'] below (such as the IRC bot); these replace the
1284 # old-style 'status' targets from earlier Buildbot releases.
1285
1286 if "status_bind" in inip1:
1287 c['www'] = {
1288 'port': inip1.get("status_bind"),
1289 'plugins': {
1290 'waterfall_view': True,
1291 'console_view': True,
1292 'grid_view': True
1293 }
1294 }
1295
1296 if "status_user" in inip1 and "status_password" in inip1:
1297 c['www']['auth'] = util.UserPasswordAuth([
1298 (inip1.get("status_user"), inip1.get("status_password"))
1299 ])
1300 c['www']['authz'] = util.Authz(
1301 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1302 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1303 )
1304
1305 c['services'] = []
1306 if ini.has_section("irc"):
1307 iniirc = ini['irc']
1308 irc_host = iniirc.get("host", None)
1309 irc_port = iniirc.getint("port", 6667)
1310 irc_chan = iniirc.get("channel", None)
1311 irc_nick = iniirc.get("nickname", None)
1312 irc_pass = iniirc.get("password", None)
1313
1314 if irc_host and irc_nick and irc_chan:
1315 irc = reporters.IRC(irc_host, irc_nick,
1316 port = irc_port,
1317 password = irc_pass,
1318 channels = [ irc_chan ],
1319 notify_events = [ 'exception', 'problem', 'recovery' ]
1320 )
1321
1322 c['services'].append(irc)
1323
1324 c['revlink'] = util.RevlinkMatch([
1325 r'https://git.openwrt.org/openwrt/(.*).git'
1326 ],
1327 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1328
1329 ####### DB URL
1330
1331 c['db'] = {
1332 # This specifies what database buildbot uses to store its state. You can leave
1333 # this at its default for all but the largest installations.
1334 'db_url' : "sqlite:///state.sqlite",
1335 }
1336
1337 c['buildbotNetUsageData'] = None