phase1: workaround gitverify 1st build failures on fresh workers
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41 from buildbot.worker.local import LocalWorker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This buildmaster configuration file must be installed as
49 # 'master.cfg' in the buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("Fix your configuration")
56
57 inip1 = ini['phase1']
58
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini['repo'].get("url")
64
65 rsync_defopts = ["-v", "--timeout=120"]
66
67 #if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
68 # rsync_bin_defopts += ["--contimeout=20"]
69
70 branches = {}
71
72 def ini_parse_branch(section):
73 b = {}
74 name = section.get("name")
75
76 if not name:
77 raise ValueError("missing 'name' in " + repr(section))
78 if name in branches:
79 raise ValueError("duplicate branch name in " + repr(section))
80
81 b["name"] = name
82 b["bin_url"] = section.get("binary_url")
83 b["bin_key"] = section.get("binary_password")
84
85 b["src_url"] = section.get("source_url")
86 b["src_key"] = section.get("source_password")
87
88 b["gpg_key"] = section.get("gpg_key")
89
90 b["usign_key"] = section.get("usign_key")
91 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
92 b["usign_comment"] = section.get("usign_comment", usign_comment)
93
94 b["config_seed"] = section.get("config_seed")
95
96 b["kmod_archive"] = section.getboolean("kmod_archive", False)
97
98 branches[name] = b
99 log.msg("Configured branch: {}".format(name))
100
101 # PB port can be either a numeric port or a connection string
102 pb_port = inip1.get("port") or 9989
103
104 # This is the dictionary that the buildmaster pays attention to. We also use
105 # a shorter alias to save typing.
106 c = BuildmasterConfig = {}
107
108 ####### PROJECT IDENTITY
109
110 # the 'title' string will appear at the top of this buildbot
111 # installation's html.WebStatus home page (linked to the
112 # 'titleURL') and is embedded in the title of the waterfall HTML page.
113
114 c['title'] = ini['general'].get("title")
115 c['titleURL'] = ini['general'].get("title_url")
116
117 # the 'buildbotURL' string should point to the location where the buildbot's
118 # internal web server (usually the html.WebStatus page) is visible. This
119 # typically uses the port number set in the Waterfall 'status' entry, but
120 # with an externally-visible host name which the buildbot cannot figure out
121 # without some help.
122
123 c['buildbotURL'] = inip1.get("buildbot_url")
124
125 ####### BUILDWORKERS
126
127 # The 'workers' list defines the set of recognized workers. Each element is
128 # a Worker object, specifying a unique worker name and password. The same
129 # worker name and password must be configured on the worker.
130
131 c['workers'] = []
132 NetLocks = dict()
133
134 def ini_parse_workers(section):
135 name = section.get("name")
136 password = section.get("password")
137 phase = section.getint("phase")
138 tagonly = section.getboolean("tag_only")
139 rsyncipv4 = section.getboolean("rsync_ipv4")
140
141 if not name or not password or phase != 1:
142 log.msg("invalid worker configuration ignored: {}".format(repr(section)))
143 return
144
145 sl_props = { 'tag_only':tagonly }
146 if "dl_lock" in section:
147 lockname = section.get("dl_lock")
148 sl_props['dl_lock'] = lockname
149 if lockname not in NetLocks:
150 NetLocks[lockname] = locks.MasterLock(lockname)
151 if "ul_lock" in section:
152 lockname = section.get("ul_lock")
153 sl_props['ul_lock'] = lockname
154 if lockname not in NetLocks:
155 NetLocks[lockname] = locks.MasterLock(lockname)
156 if rsyncipv4:
157 sl_props['rsync_ipv4'] = True # only set prop if required, we use '+' Interpolate substitution
158
159 log.msg("Configured worker: {}".format(name))
160 # NB: phase1 build factory requires workers to be single-build only
161 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
162
163
164 for section in ini.sections():
165 if section.startswith("branch "):
166 ini_parse_branch(ini[section])
167
168 if section.startswith("worker "):
169 ini_parse_workers(ini[section])
170
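# Illustrative config.ini sketch (assumed section/option names follow the parsing
# code above; hosts, credentials and values are placeholders, not real data):
#
#   [general]
#   workdir = /data/buildmaster
#   title = Example Project
#   title_url = https://example.org/
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010:interface=127.0.0.1
#   port = 9989
#
#   [repo]
#   url = https://git.example.org/project.git
#
#   [branch example-1.0]
#   name = example-1.0
#   binary_url = rsync://user@server/bin
#   binary_password = secret
#   source_url = rsync://user@server/src
#   source_password = secret
#   usign_key = <base64-encoded usign secret key>
#   config_seed = CONFIG_DEVEL=y
#   kmod_archive = yes
#
#   [worker 1]
#   name = worker-01
#   password = secret
#   phase = 1
#   tag_only = no
#   rsync_ipv4 = yes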
171 # list of branches in build-priority order
172 branchNames = [branches[b]["name"] for b in branches]
173
174 c['protocols'] = {'pb': {'port': pb_port}}
175
176 # coalesce builds
177 c['collapseRequests'] = True
178
179 # Reduce amount of backlog data
180 c['configurators'] = [util.JanitorConfigurator(
181 logHorizon=timedelta(days=3),
182 hour=6,
183 )]
184
185 @defer.inlineCallbacks
186 def getNewestCompleteTime(bldr):
187 """Returns the complete_at of the latest completed and not SKIPPED
188 build request for this builder, or None if there are no such build
189 requests. We need to filter out SKIPPED requests because we're
190 using collapseRequests=True, which unfortunately marks all
191 previous requests as complete when a new buildset is created.
192
193 @returns: datetime instance or None, via Deferred
194 """
195
196 bldrid = yield bldr.getBuilderId()
197 completed = yield bldr.master.data.get(
198 ('builders', bldrid, 'buildrequests'),
199 [
200 resultspec.Filter('complete', 'eq', [True]),
201 resultspec.Filter('results', 'ne', [results.SKIPPED]),
202 ],
203 order=['-complete_at'], limit=1)
204 if not completed:
205 return
206
207 complete_at = completed[0]['complete_at']
208
209 last_build = yield bldr.master.data.get(
210 ('builds', ),
211 [
212 resultspec.Filter('builderid', 'eq', [bldrid]),
213 ],
214 order=['-started_at'], limit=1)
215
216 if last_build and last_build[0]:
217 last_complete_at = last_build[0]['complete_at']
218 if last_complete_at and (last_complete_at > complete_at):
219 return last_complete_at
220
221 return complete_at
222
223 @defer.inlineCallbacks
224 def prioritizeBuilders(master, builders):
225 """Returns sorted list of builders by their last timestamp of completed and
226 not skipped build, ordered first by branch name.
227
228 @returns: list of sorted builders
229 """
230
231 bldrNamePrio = { "__Janitor": 0, "00_force_build": 0 }
232 i = 1
233 for bname in branchNames:
234 bldrNamePrio[bname] = i
235 i += 1
236
237 def is_building(bldr):
238 return bool(bldr.building) or bool(bldr.old_building)
239
240 def bldr_info(bldr):
241 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
242 d.addCallback(lambda complete_at: (complete_at, bldr))
243 return d
244
245 def bldr_sort(item):
246 (complete_at, bldr) = item
247
248 pos = 99
249 for (name, prio) in bldrNamePrio.items():
250 if bldr.name.startswith(name):
251 pos = prio
252 break
253
254 if not complete_at:
255 date = datetime.min
256 complete_at = date.replace(tzinfo=tzutc())
257
258 if is_building(bldr):
259 date = datetime.max
260 complete_at = date.replace(tzinfo=tzutc())
261
262 return (pos, complete_at, bldr.name)
263
264 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
265 results.sort(key=bldr_sort)
266
267 #for r in results:
268 # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
269
270 return [r[1] for r in results]
271
272 c['prioritizeBuilders'] = prioritizeBuilders
273
274 ####### CHANGESOURCES
275
276 # find targets
277 targets = set()
278
279 def populateTargets():
280 """ fetch a shallow clone of each configured branch in turn:
281 execute dump-target-info.pl and collate the results to ensure
282 targets that only exist in specific branches get built.
283 This takes a while during master startup but is executed only once.
284 """
285 log.msg("Populating targets, this will take time")
286 sourcegit = work_dir + '/source.git'
287 for branch in branchNames:
288 if os.path.isdir(sourcegit):
289 subprocess.call(["rm", "-rf", sourcegit])
290
291 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
292
293 os.makedirs(sourcegit + '/tmp', exist_ok=True)
294 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
295 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
296
297 while True:
298 line = findtargets.stdout.readline()
299 if not line:
300 break
301 ta = line.decode().strip().split(' ')
302 targets.add(ta[0])
303
304 subprocess.call(["rm", "-rf", sourcegit])
305
306 populateTargets()
307
308 # the 'change_source' setting tells the buildmaster how it should find out
309 # about source code changes.
310
311 c['change_source'] = []
312 c['change_source'].append(GitPoller(
313 repo_url,
314 workdir=work_dir+'/work.git', branches=branchNames,
315 pollAtLaunch=True, pollinterval=300))
316
317 ####### SCHEDULERS
318
319 # Configure the Schedulers, which decide how to react to incoming changes.
320
321 # Selector for known valid tags
322 class TagChoiceParameter(BaseParameter):
323 spec_attributes = ["strict", "choices"]
324 type = "list"
325 strict = True
326
327 def __init__(self, name, label=None, **kw):
328 super().__init__(name, label, **kw)
329 self._choice_list = []
330
331 def getRevTags(self, findtag=None):
332 taglist = []
333 branchvers = []
334
335 # we will filter out tags that do not match the configured branches
336 for b in branchNames:
337 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
338 if basever:
339 branchvers.append(basever[1])
340
341 # grab tags from remote repository
342 alltags = subprocess.Popen(
343 ['git', 'ls-remote', '--tags', repo_url],
344 stdout = subprocess.PIPE)
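# each output line has the form '<sha>\t<ref>'; dereferenced annotated-tag
# entries ('refs/tags/X^{}') do not match the anchored regex below and are skipped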
345
346 while True:
347 line = alltags.stdout.readline()
348
349 if not line:
350 break
351
352 (rev, tag) = line.split()
353
354 # does it match known format? ('vNN.NN.NN(-rcN)')
355 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
356
357 # only list valid tags matching configured branches
358 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
359 # if we want a specific tag, ignore all that don't match
360 if findtag and findtag != tagver[1]:
361 continue
362 taglist.append({'rev': rev.decode().strip(), 'tag': tagver[1]})
363
364 return taglist
365
366 @property
367 def choices(self):
368 taglist = [rt['tag'] for rt in self.getRevTags()]
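# sort newest first; suffixing non-rc tags with '-z' makes a final release
# sort above its own -rc candidates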
369 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
370 taglist.insert(0, '')
371
372 self._choice_list = taglist
373
374 return self._choice_list
375
376 def updateFromKwargs(self, properties, kwargs, **unused):
377 tag = self.getFromKwargs(kwargs)
378 properties[self.name] = tag
379
380 # find the commit matching the tag
381 findtag = self.getRevTags(tag)
382
383 if not findtag:
384 raise ValidationError("Couldn't find tag")
385
386 properties['force_revision'] = findtag[0]['rev']
387
388 # find the branch matching the tag
389 branch = None
390 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
391 for b in branchNames:
392 if b.endswith(branchver[1]):
393 branch = b
394
395 if not branch:
396 raise ValidationError("Couldn't find branch")
397
398 properties['force_branch'] = branch
399
400 def parse_from_arg(self, s):
401 if self.strict and s not in self._choice_list:
402 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
403 return s
404
405 @util.renderer
406 @defer.inlineCallbacks
407 def builderNames(props):
408 """ since we have per branch and per target builders,
409 address the relevant builder for each new buildrequest
410 based on the request's desired branch and target.
411 """
412 branch = props.getProperty("branch")
413 target = props.getProperty("target", "")
414
415 if target == "all":
416 target = ""
417
418 # if the branch property is unset, fall back to the sourcestamp to find a branch
419 if not branch:
420 # match builders with target branch
421 ss = props.sourcestamps[0]
422 if ss:
423 branch = ss['branch']
424 else:
425 log.msg("couldn't find builder")
426 return [] # nothing works
427
428 bname = branch + "_" + target
429 builders = []
430
431 for b in (yield props.master.data.get(('builders',))):
432 if not b['name'].startswith(bname):
433 continue
434 builders.append(b['name'])
435
436 return builders
437
438 c['schedulers'] = []
439 c['schedulers'].append(AnyBranchScheduler(
440 name = "all",
441 change_filter = util.ChangeFilter(branch=branchNames),
442 treeStableTimer = 15*60,
443 builderNames = builderNames))
444
445 c['schedulers'].append(ForceScheduler(
446 name = "force",
447 buttonName = "Force builds",
448 label = "Force build details",
449 builderNames = [ "00_force_build" ],
450
451 codebases = [
452 util.CodebaseParameter(
453 "",
454 label = "Repository",
455 branch = util.FixedParameter(name = "branch", default = ""),
456 revision = util.FixedParameter(name = "revision", default = ""),
457 repository = util.FixedParameter(name = "repository", default = ""),
458 project = util.FixedParameter(name = "project", default = "")
459 )
460 ],
461
462 reason = util.StringParameter(
463 name = "reason",
464 label = "Reason",
465 default = "Trigger build",
466 required = True,
467 size = 80
468 ),
469
470 properties = [
471 # NB: avoid nesting to simplify processing of properties
472 util.ChoiceStringParameter(
473 name = "target",
474 label = "Build target",
475 default = "all",
476 choices = [ "all" ] + list(targets)
477 ),
478 TagChoiceParameter(
479 name = "tag",
480 label = "Build tag",
481 default = ""
482 )
483 ]
484 ))
485
486 c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=builderNames))
487
488 ####### BUILDERS
489
490 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
491 # what steps, and which workers can execute them. Note that any particular build will
492 # only take place on one worker.
493
494 def IsNoMasterBuild(step):
495 return step.getProperty("branch") != "master"
496
497 def IsUsignEnabled(step):
498 branch = step.getProperty("branch")
499 return branch and branches[branch].get("usign_key")
500
501 def IsSignEnabled(step):
502 branch = step.getProperty("branch")
503 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
504
505 def IsKmodArchiveEnabled(step):
506 branch = step.getProperty("branch")
507 return branch and branches[branch].get("kmod_archive")
508
509 def IsKmodArchiveAndRsyncEnabled(step):
510 branch = step.getProperty("branch")
511 return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url"))
512
513 def GetBaseVersion(branch):
514 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
515 return branch.split('-')[1]
516 else:
517 return "master"
518
519 @properties.renderer
520 def GetVersionPrefix(props):
521 branch = props.getProperty("branch")
522 basever = GetBaseVersion(branch)
523 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
524 return "%s/" % props["tag"][1:]
525 elif basever != "master":
526 return "%s-SNAPSHOT/" % basever
527 else:
528 return ""
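# illustrative example (hypothetical branch name): branch 'example-1.0' has base
# version '1.0'; a forced build of tag 'v1.0.2' uploads under '1.0.2/', a plain
# branch build under '1.0-SNAPSHOT/', and master builds use no prefix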
529
530 @util.renderer
531 def GetConfigSeed(props):
532 branch = props.getProperty("branch")
533 return branch and branches[branch].get("config_seed") or ""
534
535 @util.renderer
536 def GetRsyncParams(props, srcorbin, urlorkey):
537 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
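# used below as e.g. GetRsyncParams.withArgs("bin", "url"), which resolves to
# branches[branch]["bin_url"] for the build's branch (None/empty when unset)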
538 branch = props.getProperty("branch")
539 opt = srcorbin + "_" + urlorkey
540 return branch and branches[branch].get(opt)
541
542 @util.renderer
543 def GetUsignKey(props):
544 branch = props.getProperty("branch")
545 return branch and branches[branch].get("usign_key")
546
547 def GetNextBuild(builder, requests):
548 for r in requests:
549 if r.properties:
550 # order tagged build first
551 if r.properties.hasProperty("tag"):
552 return r
553
554 r = requests[0]
555 #log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
556 return r
557
558 def MakeEnv(overrides=None, tryccache=False):
559 env = {
560 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
561 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
562 }
563 if tryccache:
564 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
565 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
566 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
567 else:
568 env['CC'] = env['CCC']
569 env['CXX'] = env['CCXX']
570 env['CCACHE'] = ''
571 if overrides is not None:
572 env.update(overrides)
573 return env
574
575 @properties.renderer
576 def NetLockDl(props, extralock=None):
577 lock = None
578 if props.hasProperty("dl_lock"):
579 lock = NetLocks[props["dl_lock"]]
580 if lock is not None:
581 return [lock.access('exclusive')]
582 else:
583 return []
584
585 @properties.renderer
586 def NetLockUl(props):
587 lock = None
588 if props.hasProperty("ul_lock"):
589 lock = NetLocks[props["ul_lock"]]
590 if lock is not None:
591 return [lock.access('exclusive')]
592 else:
593 return []
594
595 def IsTargetSelected(target):
596 def CheckTargetProperty(step):
597 selected_target = step.getProperty("target", "all")
598 if selected_target != "all" and selected_target != target:
599 return False
600 return True
601
602 return CheckTargetProperty
603
604 @util.renderer
605 def UsignSec2Pub(props):
606 branch = props.getProperty("branch")
607 try:
608 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
609 seckey = branches[branch].get("usign_key")
610 seckey = base64.b64decode(seckey)
611 except Exception:
612 return None
613
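# assumed usign/signify key layout: bytes 0..2 are the algorithm id, 32..40 the
# key fingerprint (keynum) and 72.. the public half of the Ed25519 keypair;
# concatenated they form the matching public key blob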
614 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
615 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
616
617
618 def canStartBuild(builder, wfb, request):
619 """ filter out non tag requests for tag_only workers. """
620 wtagonly = wfb.worker.properties.getProperty('tag_only')
621 tag = request.properties.getProperty('tag')
622
623 if wtagonly and not tag:
624 return False
625
626 return True
627
628 c['builders'] = []
629
630 workerNames = [ ]
631
632 for worker in c['workers']:
633 workerNames.append(worker.workername)
634
635 # add a single LocalWorker to handle the forcebuild builder
636 c['workers'].append(LocalWorker("__local_force_build", max_builds=1))
637
638 force_factory = BuildFactory()
639 force_factory.addStep(steps.Trigger(
640 name = "trigger_build",
641 schedulerNames = [ "trigger" ],
642 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
643 set_properties = { "reason": Property("reason"), "tag": Property("tag"), "target": Property("target") },
644 ))
645
646 c['builders'].append(BuilderConfig(
647 name = "00_force_build",
648 workername = "__local_force_build",
649 factory = force_factory))
650
651
652 # NB the phase1 build factory assumes workers are single-build only
653 for target in targets:
654 ts = target.split('/')
655
656 factory = BuildFactory()
657
658 # setup shared work directory if required
659 factory.addStep(ShellCommand(
660 name = "sharedwd",
661 descriptionDone = "Shared work directory set up",
662 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
663 workdir = ".",
664 haltOnFailure = True,
665 ))
666
667 # find number of cores
668 factory.addStep(SetPropertyFromCommand(
669 name = "nproc",
670 property = "nproc",
671 description = "Finding number of CPUs",
672 command = ["nproc"],
673 ))
674
675 # find gcc and g++ compilers
676 factory.addStep(FileDownload(
677 name = "dlfindbinpl",
678 mastersrc = scripts_dir + '/findbin.pl',
679 workerdest = "../findbin.pl",
680 mode = 0o755,
681 ))
682
683 factory.addStep(SetPropertyFromCommand(
684 name = "gcc",
685 property = "cc_command",
686 description = "Finding gcc command",
687 command = ["../findbin.pl", "gcc", "", ""],
688 haltOnFailure = True,
689 ))
690
691 factory.addStep(SetPropertyFromCommand(
692 name = "g++",
693 property = "cxx_command",
694 description = "Finding g++ command",
695 command = ["../findbin.pl", "g++", "", ""],
696 haltOnFailure = True,
697 ))
698
699 # see if ccache is available
700 factory.addStep(SetPropertyFromCommand(
701 name = "ccache",
702 property = "ccache_command",
703 description = "Testing for ccache command",
704 command = ["which", "ccache"],
705 haltOnFailure = False,
706 flunkOnFailure = False,
707 warnOnFailure = False,
708 hideStepIf = lambda r, s: r==results.FAILURE,
709 ))
710
711 # check out the source
712 # Git() runs:
713 # if repo doesn't exist: 'git clone repourl'
714 # method 'clean' runs 'git clean -d -f'; method 'fresh' runs 'git clean -f -f -d -x' (both only work with mode='full')
715 # git cat-file -e <commit>
716 # git checkout -f <commit>
717 # git checkout -B <branch>
718 # git rev-parse HEAD
719 factory.addStep(Git(
720 name = "git",
721 repourl = repo_url,
722 mode = 'full',
723 method = 'fresh',
724 locks = NetLockDl,
725 haltOnFailure = True,
726 ))
727
728 # workaround for https://github.com/openwrt/buildbot/issues/5
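# per the commit subject, the very first checkout on a fresh worker can leave
# the tree in a state that trips the 'gitverify' step below; repeating the Git
# step is a pragmatic workaround (details in the issue linked above)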
729 factory.addStep(Git(
730 name = "git me once more please",
731 repourl = repo_url,
732 mode = 'full',
733 method = 'fresh',
734 locks = NetLockDl,
735 haltOnFailure = True,
736 ))
737
738 # update remote refs
739 factory.addStep(ShellCommand(
740 name = "fetchrefs",
741 description = "Fetching Git remote refs",
742 descriptionDone = "Git remote refs fetched",
743 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
744 haltOnFailure = True,
745 ))
746
747 # Verify that Git HEAD points to a tag or branch
748 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
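# the check passes when either HEAD is on a named branch ('git rev-parse
# --abbrev-ref HEAD' prints something other than 'HEAD'), or the HEAD commit is
# referenced by a release tag of the form 'vNN.*'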
749 factory.addStep(ShellCommand(
750 name = "gitverify",
751 description = "Ensuring that Git HEAD is pointing to a branch or tag",
752 descriptionDone = "Git HEAD is sane",
753 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
754 haltOnFailure = True,
755 ))
756
757 factory.addStep(ShellCommand(
758 name = "rmtmp",
759 description = "Remove tmp folder",
760 command=["rm", "-rf", "tmp/"],
761 ))
762
763 # feed
764 factory.addStep(ShellCommand(
765 name = "rmfeedlinks",
766 description = "Remove feed symlinks",
767 command=["rm", "-rf", "package/feeds/"],
768 ))
769
770 factory.addStep(StringDownload(
771 name = "ccachecc",
772 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
773 workerdest = "../ccache_cc.sh",
774 mode = 0o755,
775 ))
776
777 factory.addStep(StringDownload(
778 name = "ccachecxx",
779 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
780 workerdest = "../ccache_cxx.sh",
781 mode = 0o755,
782 ))
783
784 # feed
785 factory.addStep(ShellCommand(
786 name = "updatefeeds",
787 description = "Updating feeds",
788 command=["./scripts/feeds", "update"],
789 env = MakeEnv(tryccache=True),
790 haltOnFailure = True,
791 locks = NetLockDl,
792 ))
793
794 # feed
795 factory.addStep(ShellCommand(
796 name = "installfeeds",
797 description = "Installing feeds",
798 command=["./scripts/feeds", "install", "-a"],
799 env = MakeEnv(tryccache=True),
800 haltOnFailure = True,
801 ))
802
803 # seed config
804 factory.addStep(StringDownload(
805 name = "dlconfigseed",
806 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
807 workerdest = ".config",
808 mode = 0o644,
809 ))
810
811 # configure
812 factory.addStep(ShellCommand(
813 name = "newconfig",
814 descriptionDone = ".config seeded",
815 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
816 ))
817
818 factory.addStep(ShellCommand(
819 name = "delbin",
820 description = "Removing output directory",
821 command = ["rm", "-rf", "bin/"],
822 ))
823
824 factory.addStep(ShellCommand(
825 name = "defconfig",
826 description = "Populating .config",
827 command = ["make", "defconfig"],
828 env = MakeEnv(),
829 ))
830
831 # check arch - exit early if it does not exist - NB: some targets do not define CONFIG_TARGET_<target>_<subtarget>
832 factory.addStep(ShellCommand(
833 name = "checkarch",
834 description = "Checking architecture",
835 descriptionDone = "Architecture validated",
836 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
837 logEnviron = False,
838 want_stdout = False,
839 want_stderr = False,
840 haltOnFailure = True,
841 flunkOnFailure = False, # this is not a build FAILURE - TODO mark build as SKIPPED
842 ))
843
844 # find libc suffix
845 factory.addStep(SetPropertyFromCommand(
846 name = "libc",
847 property = "libc",
848 description = "Finding libc suffix",
849 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
850 ))
851
852 # install build key
853 factory.addStep(StringDownload(
854 name = "dlkeybuildpub",
855 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
856 workerdest = "key-build.pub",
857 mode = 0o600,
858 doStepIf = IsUsignEnabled,
859 ))
860
861 factory.addStep(StringDownload(
862 name = "dlkeybuild",
863 s = "# fake private key",
864 workerdest = "key-build",
865 mode = 0o600,
866 doStepIf = IsUsignEnabled,
867 ))
868
869 factory.addStep(StringDownload(
870 name = "dlkeybuilducert",
871 s = "# fake certificate",
872 workerdest = "key-build.ucert",
873 mode = 0o600,
874 doStepIf = IsUsignEnabled,
875 ))
876
877 # prepare dl
878 factory.addStep(ShellCommand(
879 name = "dldir",
880 description = "Preparing dl/",
881 descriptionDone = "dl/ prepared",
882 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
883 workdir = Property("builddir"),
884 logEnviron = False,
885 want_stdout = False,
886 ))
887
888 # cleanup dl
889 factory.addStep(ShellCommand(
890 name = "dlprune",
891 description = "Pruning dl/",
892 descriptionDone = "dl/ pruned",
893 command = 'find dl/ -atime +15 -delete -print',
894 logEnviron = False,
895 ))
896
897 # prepare tar
898 factory.addStep(ShellCommand(
899 name = "dltar",
900 description = "Building and installing GNU tar",
901 descriptionDone = "GNU tar built and installed",
902 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
903 env = MakeEnv(tryccache=True),
904 haltOnFailure = True,
905 ))
906
907 # populate dl
908 factory.addStep(ShellCommand(
909 name = "dlrun",
910 description = "Populating dl/",
911 descriptionDone = "dl/ populated",
912 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
913 env = MakeEnv(),
914 logEnviron = False,
915 locks = NetLockDl,
916 ))
917
918 factory.addStep(ShellCommand(
919 name = "cleanbase",
920 description = "Cleaning base-files",
921 command=["make", "package/base-files/clean", "V=s"],
922 ))
923
924 # build
925 factory.addStep(ShellCommand(
926 name = "tools",
927 description = "Building and installing tools",
928 descriptionDone = "Tools built and installed",
929 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
930 env = MakeEnv(tryccache=True),
931 haltOnFailure = True,
932 ))
933
934 factory.addStep(ShellCommand(
935 name = "toolchain",
936 description = "Building and installing toolchain",
937 descriptionDone = "Toolchain built and installed",
938 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
939 env = MakeEnv(),
940 haltOnFailure = True,
941 ))
942
943 factory.addStep(ShellCommand(
944 name = "kmods",
945 description = "Building kmods",
946 descriptionDone = "Kmods built",
947 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
948 env = MakeEnv(),
949 haltOnFailure = True,
950 ))
951
952 # find kernel version
953 factory.addStep(SetPropertyFromCommand(
954 name = "kernelversion",
955 property = "kernelversion",
956 description = "Finding the effective Kernel version",
957 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
958 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
959 ))
960
961 factory.addStep(ShellCommand(
962 name = "pkgclean",
963 description = "Cleaning up package build",
964 descriptionDone = "Package build cleaned up",
965 command=["make", "package/cleanup", "V=s"],
966 ))
967
968 factory.addStep(ShellCommand(
969 name = "pkgbuild",
970 description = "Building packages",
971 descriptionDone = "Packages built",
972 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
973 env = MakeEnv(),
974 haltOnFailure = True,
975 ))
976
977 factory.addStep(ShellCommand(
978 name = "pkginstall",
979 description = "Installing packages",
980 descriptionDone = "Packages installed",
981 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
982 env = MakeEnv(),
983 haltOnFailure = True,
984 ))
985
986 factory.addStep(ShellCommand(
987 name = "pkgindex",
988 description = "Indexing packages",
989 descriptionDone = "Packages indexed",
990 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
991 env = MakeEnv(),
992 haltOnFailure = True,
993 ))
994
995 factory.addStep(ShellCommand(
996 name = "images",
997 description = "Building and installing images",
998 descriptionDone = "Images built and installed",
999 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
1000 env = MakeEnv(),
1001 haltOnFailure = True,
1002 ))
1003
1004 factory.addStep(ShellCommand(
1005 name = "buildinfo",
1006 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1007 command = "make -j1 buildinfo V=s || true",
1008 env = MakeEnv(),
1009 haltOnFailure = True,
1010 ))
1011
1012 factory.addStep(ShellCommand(
1013 name = "json_overview_image_info",
1014 description = "Generating profiles.json in target folder",
1015 command = "make -j1 json_overview_image_info V=s || true",
1016 env = MakeEnv(),
1017 haltOnFailure = True,
1018 ))
1019
1020 factory.addStep(ShellCommand(
1021 name = "checksums",
1022 description = "Calculating checksums",
1023 descriptionDone = "Checksums calculated",
1024 command=["make", "-j1", "checksum", "V=s"],
1025 env = MakeEnv(),
1026 haltOnFailure = True,
1027 ))
1028
1029 factory.addStep(ShellCommand(
1030 name = "kmoddir",
1031 descriptionDone = "Kmod directory created",
1032 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1033 haltOnFailure = True,
1034 doStepIf = IsKmodArchiveEnabled,
1035 ))
1036
1037 factory.addStep(ShellCommand(
1038 name = "kmodprepare",
1039 description = "Preparing kmod archive",
1040 descriptionDone = "Kmod archive prepared",
1041 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1042 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1043 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1044 haltOnFailure = True,
1045 doStepIf = IsKmodArchiveEnabled,
1046 ))
1047
1048 factory.addStep(ShellCommand(
1049 name = "kmodindex",
1050 description = "Indexing kmod archive",
1051 descriptionDone = "Kmod archive indexed",
1052 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1053 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1054 env = MakeEnv(),
1055 haltOnFailure = True,
1056 doStepIf = IsKmodArchiveEnabled,
1057 ))
1058
1059 # sign
1060 factory.addStep(MasterShellCommand(
1061 name = "signprepare",
1062 descriptionDone = "Temporary signing directory prepared",
1063 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1064 haltOnFailure = True,
1065 doStepIf = IsSignEnabled,
1066
1067 ))
1068
1069 factory.addStep(ShellCommand(
1070 name = "signpack",
1071 description = "Packing files to sign",
1072 descriptionDone = "Files to sign packed",
1073 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1074 haltOnFailure = True,
1075 doStepIf = IsSignEnabled,
1076 ))
1077
1078 factory.addStep(FileUpload(
1079 workersrc = "sign.tar.gz",
1080 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1081 haltOnFailure = True,
1082 doStepIf = IsSignEnabled,
1083 ))
1084
1085 factory.addStep(MasterShellCommand(
1086 name = "signfiles",
1087 description = "Signing files",
1088 descriptionDone = "Files signed",
1089 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
1090 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1091 haltOnFailure = True,
1092 doStepIf = IsSignEnabled,
1093 ))
1094
1095 factory.addStep(FileDownload(
1096 name = "dlsigntargz",
1097 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1098 workerdest = "sign.tar.gz",
1099 haltOnFailure = True,
1100 doStepIf = IsSignEnabled,
1101 ))
1102
1103 factory.addStep(ShellCommand(
1104 name = "signunpack",
1105 description = "Unpacking signed files",
1106 descriptionDone = "Signed files unpacked",
1107 command = ["tar", "-xzf", "sign.tar.gz"],
1108 haltOnFailure = True,
1109 doStepIf = IsSignEnabled,
1110 ))
1111
1112 # upload
1113 factory.addStep(ShellCommand(
1114 name = "dirprepare",
1115 descriptionDone = "Upload directory structure prepared",
1116 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1117 haltOnFailure = True,
1118 ))
1119
1120 factory.addStep(ShellCommand(
1121 name = "linkprepare",
1122 descriptionDone = "Repository symlink prepared",
1123 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1124 doStepIf = IsNoMasterBuild,
1125 haltOnFailure = True,
1126 ))
1127
1128 factory.addStep(ShellCommand(
1129 name = "kmoddirprepare",
1130 descriptionDone = "Kmod archive upload directory prepared",
1131 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1132 haltOnFailure = True,
1133 doStepIf = IsKmodArchiveEnabled,
1134 ))
1135
1136 factory.addStep(ShellCommand(
1137 name = "dirupload",
1138 description = "Uploading directory structure",
1139 descriptionDone = "Directory structure uploaded",
1140 command = ["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1141 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1142 haltOnFailure = True,
1143 logEnviron = False,
1144 locks = NetLockUl,
1145 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1146 ))
1147
1148 # download remote sha256sums to 'target-sha256sums'
1149 factory.addStep(ShellCommand(
1150 name = "target-sha256sums",
1151 description = "Fetching remote sha256sums for target",
1152 descriptionDone = "Remote sha256sums for target fetched",
1153 command = ["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1154 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1155 logEnviron = False,
1156 haltOnFailure = False,
1157 flunkOnFailure = False,
1158 warnOnFailure = False,
1159 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1160 ))
1161
1162 # build list of files to upload
1163 factory.addStep(FileDownload(
1164 name = "dlsha2rsyncpl",
1165 mastersrc = scripts_dir + '/sha2rsync.pl',
1166 workerdest = "../sha2rsync.pl",
1167 mode = 0o755,
1168 ))
1169
1170 factory.addStep(ShellCommand(
1171 name = "buildlist",
1172 description = "Building list of files to upload",
1173 descriptionDone = "List of files to upload built",
1174 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1175 haltOnFailure = True,
1176 ))
1177
1178 factory.addStep(FileDownload(
1179 name = "dlrsync.sh",
1180 mastersrc = scripts_dir + '/rsync.sh',
1181 workerdest = "../rsync.sh",
1182 mode = 0o755,
1183 ))
1184
1185 # upload new files and update existing ones
1186 factory.addStep(ShellCommand(
1187 name = "targetupload",
1188 description = "Uploading target files",
1189 descriptionDone = "Target files uploaded",
1190 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1191 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1192 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1193 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1194 haltOnFailure = True,
1195 logEnviron = False,
1196 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1197 ))
1198
1199 # delete files which don't exist locally
1200 factory.addStep(ShellCommand(
1201 name = "targetprune",
1202 description = "Pruning target files",
1203 descriptionDone = "Target files pruned",
1204 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1205 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1206 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1207 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1208 haltOnFailure = True,
1209 logEnviron = False,
1210 locks = NetLockUl,
1211 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1212 ))
1213
1214 factory.addStep(ShellCommand(
1215 name = "kmodupload",
1216 description = "Uploading kmod archive",
1217 descriptionDone = "Kmod archive uploaded",
1218 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1219 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1220 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1221 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1222 haltOnFailure = True,
1223 logEnviron = False,
1224 locks = NetLockUl,
1225 doStepIf = IsKmodArchiveAndRsyncEnabled,
1226 ))
1227
1228 factory.addStep(ShellCommand(
1229 name = "sourcelist",
1230 description = "Finding source archives to upload",
1231 descriptionDone = "Source archives to upload found",
1232 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1233 haltOnFailure = True,
1234 ))
1235
1236 factory.addStep(ShellCommand(
1237 name = "sourceupload",
1238 description = "Uploading source archives",
1239 descriptionDone = "Source archives uploaded",
1240 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1241 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), Interpolate("-a%(prop:rsync_ipv4:+4)s"), "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1242 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1243 haltOnFailure = True,
1244 logEnviron = False,
1245 locks = NetLockUl,
1246 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1247 ))
1248
1249 factory.addStep(ShellCommand(
1250 name = "df",
1251 description = "Reporting disk usage",
1252 command=["df", "-h", "."],
1253 env={'LC_ALL': 'C'},
1254 logEnviron = False,
1255 haltOnFailure = False,
1256 flunkOnFailure = False,
1257 warnOnFailure = False,
1258 alwaysRun = True,
1259 ))
1260
1261 factory.addStep(ShellCommand(
1262 name = "du",
1263 description = "Reporting estimated file space usage",
1264 command=["du", "-sh", "."],
1265 env={'LC_ALL': 'C'},
1266 logEnviron = False,
1267 haltOnFailure = False,
1268 flunkOnFailure = False,
1269 warnOnFailure = False,
1270 alwaysRun = True,
1271 ))
1272
1273 factory.addStep(ShellCommand(
1274 name = "ccachestat",
1275 description = "Reporting ccache stats",
1276 command=["ccache", "-s"],
1277 logEnviron = False,
1278 want_stderr = False,
1279 haltOnFailure = False,
1280 flunkOnFailure = False,
1281 warnOnFailure = False,
1282 doStepIf = util.Transform(bool, Property("ccache_command")),
1283 ))
1284
1285 for brname in branchNames:
1286 bldrname = brname + "_" + target
1287 c['builders'].append(BuilderConfig(name=bldrname, workernames=workerNames, factory=factory, tags=[brname,], nextBuild=GetNextBuild, canStartBuild=canStartBuild))
1288
1289
1290 ####### STATUS TARGETS
1291
1292 # Status targets: the results of each build will be pushed to these.
1293 # With current buildbot this means the 'www' web UI configured below and
1294 # the reporters (e.g. the IRC bot) registered under c['services'].
1295
1296 if "status_bind" in inip1:
1297 c['www'] = {
1298 'port': inip1.get("status_bind"),
1299 'plugins': {
1300 'waterfall_view': True,
1301 'console_view': True,
1302 'grid_view': True
1303 }
1304 }
1305
1306 if "status_user" in inip1 and "status_password" in inip1:
1307 c['www']['auth'] = util.UserPasswordAuth([
1308 (inip1.get("status_user"), inip1.get("status_password"))
1309 ])
1310 c['www']['authz'] = util.Authz(
1311 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1312 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1313 )
1314
1315 c['services'] = []
1316 if ini.has_section("irc"):
1317 iniirc = ini['irc']
1318 irc_host = iniirc.get("host", None)
1319 irc_port = iniirc.getint("port", 6667)
1320 irc_chan = iniirc.get("channel", None)
1321 irc_nick = iniirc.get("nickname", None)
1322 irc_pass = iniirc.get("password", None)
1323
1324 if irc_host and irc_nick and irc_chan:
1325 irc = reporters.IRC(irc_host, irc_nick,
1326 port = irc_port,
1327 password = irc_pass,
1328 channels = [ irc_chan ],
1329 notify_events = [ 'exception', 'problem', 'recovery' ]
1330 )
1331
1332 c['services'].append(irc)
1333
1334 c['revlink'] = util.RevlinkMatch([
1335 r'https://git.openwrt.org/openwrt/(.*).git'
1336 ],
1337 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1338
1339 ####### DB URL
1340
1341 c['db'] = {
1342 # This specifies what database buildbot uses to store its state. You can leave
1343 # this at its default for all but the largest installations.
1344 'db_url' : "sqlite:///state.sqlite",
1345 }
1346
1347 c['buildbotNetUsageData'] = None