phase1: dlprune: fix cannot delete ‘dl/’: Not a directory
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41 from buildbot.worker.local import LocalWorker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("Fix your configuration: missing [general] or [phase1] section in config.ini")
56
57 inip1 = ini['phase1']
58
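# Illustrative config.ini skeleton (section and option names are the ones read
# below and further down in this file; all values are placeholders):
#
#   [general]
#   title = OpenWrt Project
#   title_url = https://example.org/
#   workdir = /data/buildbot
#
#   [phase1]
#   buildbot_url = https://example.org/buildbot/
#   status_bind = tcp:8010:interface=127.0.0.1
#   port = 9989
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#
#   [branch master]
#   name = master
#   binary_url = rsync://upload@example.org/bin
#   binary_password = secret
#   source_url = rsync://upload@example.org/src
#   source_password = secret
#   usign_key = <base64-encoded usign secret key>
#   kmod_archive = true
#
#   [worker 1]
#   name = builder1
#   password = secret
#   phase = 1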
59 # Globals
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini['repo'].get("url")
64
65 rsync_defopts = ["-v", "--timeout=120"]
66
67 #if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
68 # rsync_bin_defopts += ["--contimeout=20"]
69
70 branches = {}
71
72 def ini_parse_branch(section):
73 b = {}
74 name = section.get("name")
75
76 if not name:
77 raise ValueError("missing 'name' in " + repr(section))
78 if name in branches:
79 raise ValueError("duplicate branch name in " + repr(section))
80
81 b["name"] = name
82 b["bin_url"] = section.get("binary_url")
83 b["bin_key"] = section.get("binary_password")
84
85 b["src_url"] = section.get("source_url")
86 b["src_key"] = section.get("source_password")
87
88 b["gpg_key"] = section.get("gpg_key")
89
90 b["usign_key"] = section.get("usign_key")
91 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
92 b["usign_comment"] = section.get("usign_comment", usign_comment)
93
94 b["config_seed"] = section.get("config_seed")
95
96 b["kmod_archive"] = section.getboolean("kmod_archive", False)
97
98 branches[name] = b
99 log.msg("Configured branch: {}".format(name))
100
101 # PB port can be either a numeric port or a connection string
102 pb_port = inip1.get("port") or 9989
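# e.g. 9989, or a Twisted strports string such as "tcp:9989:interface=127.0.0.1"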
103
104 # This is the dictionary that the buildmaster pays attention to. We also use
105 # a shorter alias to save typing.
106 c = BuildmasterConfig = {}
107
108 ####### PROJECT IDENTITY
109
110 # the 'title' string will appear at the top of this buildbot
111 # installation's html.WebStatus home page (linked to the
112 # 'titleURL') and is embedded in the title of the waterfall HTML page.
113
114 c['title'] = ini['general'].get("title")
115 c['titleURL'] = ini['general'].get("title_url")
116
117 # the 'buildbotURL' string should point to the location where the buildbot's
118 # internal web server (usually the html.WebStatus page) is visible. This
119 # typically uses the port number set in the Waterfall 'status' entry, but
120 # with an externally-visible host name which the buildbot cannot figure out
121 # without some help.
122
123 c['buildbotURL'] = inip1.get("buildbot_url")
124
125 ####### BUILDWORKERS
126
127 # The 'workers' list defines the set of recognized buildworkers. Each element is
128 # a Worker object, specifying a unique worker name and password. The same
129 # worker name and password must be configured on the worker.
130
131 c['workers'] = []
132 NetLocks = dict()
133
134 def ini_parse_workers(section):
135 name = section.get("name")
136 password = section.get("password")
137 phase = section.getint("phase")
138 tagonly = section.getboolean("tag_only")
139 rsyncipv4 = section.getboolean("rsync_ipv4")
140
141 if not name or not password or phase != 1:
142 log.msg("invalid worker configuration ignored: {}".format(repr(section)))
143 return
144
145 sl_props = { 'tag_only':tagonly }
146 if "dl_lock" in section:
147 lockname = section.get("dl_lock")
148 sl_props['dl_lock'] = lockname
149 if lockname not in NetLocks:
150 NetLocks[lockname] = locks.MasterLock(lockname)
151 if "ul_lock" in section:
152 lockname = section.get("ul_lock")
153 sl_props['ul_lock'] = lockname
154 if lockname not in NetLocks:
155 NetLocks[lockname] = locks.MasterLock(lockname)
156 if rsyncipv4:
157 sl_props['rsync_ipv4'] = True # only set prop if required, we use '+' Interpolate substitution
158
159 log.msg("Configured worker: {}".format(name))
160 # NB: phase1 build factory requires workers to be single-build only
161 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
162
163
164 for section in ini.sections():
165 if section.startswith("branch "):
166 ini_parse_branch(ini[section])
167
168 if section.startswith("worker "):
169 ini_parse_workers(ini[section])
170
171 # list of branches in build-priority order
172 branchNames = [branches[b]["name"] for b in branches]
173
174 c['protocols'] = {'pb': {'port': pb_port}}
175
176 # coalesce builds
177 c['collapseRequests'] = True
178
179 # Reduce amount of backlog data
180 c['configurators'] = [util.JanitorConfigurator(
181 logHorizon=timedelta(days=3),
182 hour=6,
183 )]
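# the Janitor removes step logs older than logHorizon (3 days here) in a daily
# run at the given hour; build and buildset records themselves are kept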
184
185 @defer.inlineCallbacks
186 def getNewestCompleteTime(bldr):
187 """Returns the complete_at of the latest completed and not SKIPPED
188 build request for this builder, or None if there are no such build
189 requests. We need to filter out SKIPPED requests because we're
190 using collapseRequests=True, which unfortunately marks all
191 previous requests as complete when a new buildset is created.
192
193 @returns: datetime instance or None, via Deferred
194 """
195
196 bldrid = yield bldr.getBuilderId()
197 completed = yield bldr.master.data.get(
198 ('builders', bldrid, 'buildrequests'),
199 [
200 resultspec.Filter('complete', 'eq', [True]),
201 resultspec.Filter('results', 'ne', [results.SKIPPED]),
202 ],
203 order=['-complete_at'], limit=1)
204 if not completed:
205 return
206
207 complete_at = completed[0]['complete_at']
208
209 last_build = yield bldr.master.data.get(
210 ('builds', ),
211 [
212 resultspec.Filter('builderid', 'eq', [bldrid]),
213 ],
214 order=['-started_at'], limit=1)
215
216 if last_build and last_build[0]:
217 last_complete_at = last_build[0]['complete_at']
218 if last_complete_at and (last_complete_at > complete_at):
219 return last_complete_at
220
221 return complete_at
222
223 @defer.inlineCallbacks
224 def prioritizeBuilders(master, builders):
225 """Returns sorted list of builders by their last timestamp of completed and
226 not skipped build, ordered first by branch name.
227
228 @returns: list of sorted builders
229 """
230
231 bldrNamePrio = { "__Janitor": 0, "00_force_build": 0 }
232 i = 1
233 for bname in branchNames:
234 bldrNamePrio[bname] = i
235 i += 1
236
237 def is_building(bldr):
238 return bool(bldr.building) or bool(bldr.old_building)
239
240 def bldr_info(bldr):
241 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
242 d.addCallback(lambda complete_at: (complete_at, bldr))
243 return d
244
245 def bldr_sort(item):
246 (complete_at, bldr) = item
247
248 pos = 99
249 for (name, prio) in bldrNamePrio.items():
250 if bldr.name.startswith(name):
251 pos = prio
252 break
253
254 if not complete_at:
255 date = datetime.min
256 complete_at = date.replace(tzinfo=tzutc())
257
258 if is_building(bldr):
259 date = datetime.max
260 complete_at = date.replace(tzinfo=tzutc())
261
262 return (pos, complete_at, bldr.name)
263
264 infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
265 infos.sort(key=bldr_sort)
266
267 #for r in infos:
268 # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
269
270 return [r[1] for r in infos]
271
272 c['prioritizeBuilders'] = prioritizeBuilders
273
274 ####### CHANGESOURCES
275
276 # find targets
277 targets = set()
278
279 def populateTargets():
280 """ fetch a shallow clone of each configured branch in turn:
281 execute dump-target-info.pl and collate the results to ensure
282 targets that only exist in specific branches get built.
283 This takes a while during master startup but is executed only once.
284 """
285 log.msg("Populating targets, this will take time")
286 sourcegit = work_dir + '/source.git'
287 for branch in branchNames:
288 if os.path.isdir(sourcegit):
289 subprocess.call(["rm", "-rf", sourcegit])
290
291 subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])
292
293 os.makedirs(sourcegit + '/tmp', exist_ok=True)
294 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
295 stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)
296
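# each output line is expected to look roughly like 'ath79/generic mips_24kc'
# (target/subtarget followed by the arch); only the first field is collected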
297 while True:
298 line = findtargets.stdout.readline()
299 if not line:
300 break
301 ta = line.decode().strip().split(' ')
302 targets.add(ta[0])
303
304 subprocess.call(["rm", "-rf", sourcegit])
305
306 populateTargets()
307
308 # the 'change_source' setting tells the buildmaster how it should find out
309 # about source code changes.
310
311 c['change_source'] = []
312 c['change_source'].append(GitPoller(
313 repo_url,
314 workdir=work_dir+'/work.git', branches=branchNames,
315 pollAtLaunch=True, pollinterval=300))
316
317 ####### SCHEDULERS
318
319 # Configure the Schedulers, which decide how to react to incoming changes.
320
321 # Selector for known valid tags
322 class TagChoiceParameter(BaseParameter):
323 spec_attributes = ["strict", "choices"]
324 type = "list"
325 strict = True
326
327 def __init__(self, name, label=None, **kw):
328 super().__init__(name, label, **kw)
329 self._choice_list = []
330
331 def getRevTags(self, findtag=None):
332 taglist = []
333 branchvers = []
334
335 # we will filter out tags that do not match the configured branches
336 for b in branchNames:
337 basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
338 if basever:
339 branchvers.append(basever[1])
340
341 # grab tags from remote repository
342 alltags = subprocess.Popen(
343 ['git', 'ls-remote', '--tags', repo_url],
344 stdout = subprocess.PIPE)
345
346 while True:
347 line = alltags.stdout.readline()
348
349 if not line:
350 break
351
352 (rev, tag) = line.split()
353
354 # does it match the known format? ('vNN.NN.NN(-rcN)')
355 tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())
356
357 # only list valid tags matching configured branches
358 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
359 # if we want a specific tag, ignore all that don't match
360 if findtag and findtag != tagver[1]:
361 continue
362 taglist.append({'rev': rev.decode().strip(), 'tag': tagver[1]})
363
364 return taglist
365
366 @property
367 def choices(self):
368 taglist = [rt['tag'] for rt in self.getRevTags()]
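# sort key: non-rc tags get '-z' appended so that, with reverse=True, a final
# release (e.g. v21.02.1) is listed before its release candidates (v21.02.1-rc1)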
369 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
370 taglist.insert(0, '')
371
372 self._choice_list = taglist
373
374 return self._choice_list
375
376 def updateFromKwargs(self, properties, kwargs, **unused):
377 tag = self.getFromKwargs(kwargs)
378 properties[self.name] = tag
379
380 # find the commit matching the tag
381 findtag = self.getRevTags(tag)
382
383 if not findtag:
384 raise ValidationError("Couldn't find tag")
385
386 properties['force_revision'] = findtag[0]['rev']
387
388 # find the branch matching the tag
389 branch = None
390 branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
391 for b in branchNames:
392 if b.endswith(branchver[1]):
393 branch = b
394
395 if not branch:
396 raise ValidationError("Couldn't find branch")
397
398 properties['force_branch'] = branch
399
400 def parse_from_arg(self, s):
401 if self.strict and s not in self._choice_list:
402 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
403 return s
404
405 @util.renderer
406 @defer.inlineCallbacks
407 def builderNames(props):
408 """ since we have per branch and per target builders,
409 address the relevant builder for each new buildrequest
410 based on the request's desired branch and target.
411 """
412 branch = props.getProperty("branch")
413 target = props.getProperty("target", "")
414
415 if target == "all":
416 target = ""
417
418 # if the branch property is unset, fall back to the sourcestamp to find a branch
419 if not branch:
420 # match builders with target branch
421 ss = props.sourcestamps[0]
422 if ss:
423 branch = ss['branch']
424 else:
425 log.msg("couldn't find builder")
426 return [] # nothing works
427
428 bname = branch + "_" + target
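# e.g. branch 'openwrt-21.02' and target 'ath79/generic' yield the prefix
# 'openwrt-21.02_ath79/generic', matching the builder names created below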
429 builders = []
430
431 for b in (yield props.master.data.get(('builders',))):
432 if not b['name'].startswith(bname):
433 continue
434 builders.append(b['name'])
435
436 return builders
437
438 c['schedulers'] = []
439 c['schedulers'].append(AnyBranchScheduler(
440 name = "all",
441 change_filter = util.ChangeFilter(branch=branchNames),
442 treeStableTimer = 15*60,
443 builderNames = builderNames))
444
445 c['schedulers'].append(ForceScheduler(
446 name = "force",
447 buttonName = "Force builds",
448 label = "Force build details",
449 builderNames = [ "00_force_build" ],
450
451 codebases = [
452 util.CodebaseParameter(
453 "",
454 label = "Repository",
455 branch = util.FixedParameter(name = "branch", default = ""),
456 revision = util.FixedParameter(name = "revision", default = ""),
457 repository = util.FixedParameter(name = "repository", default = ""),
458 project = util.FixedParameter(name = "project", default = "")
459 )
460 ],
461
462 reason = util.StringParameter(
463 name = "reason",
464 label = "Reason",
465 default = "Trigger build",
466 required = True,
467 size = 80
468 ),
469
470 properties = [
471 # NB: avoid nesting to simplify processing of properties
472 util.ChoiceStringParameter(
473 name = "target",
474 label = "Build target",
475 default = "all",
476 choices = [ "all" ] + list(targets)
477 ),
478 TagChoiceParameter(
479 name = "tag",
480 label = "Build tag",
481 default = ""
482 )
483 ]
484 ))
485
486 c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=builderNames))
487
488 ####### BUILDERS
489
490 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
491 # what steps, and which workers can execute them. Note that any particular build will
492 # only take place on one worker.
493
494 def IsNoMasterBuild(step):
495 return step.getProperty("branch") != "master"
496
497 def IsUsignEnabled(step):
498 branch = step.getProperty("branch")
499 return branch and branches[branch].get("usign_key")
500
501 def IsSignEnabled(step):
502 branch = step.getProperty("branch")
503 return IsUsignEnabled(step) or (branch and branches[branch].get("gpg_key"))
504
505 def IsKmodArchiveEnabled(step):
506 branch = step.getProperty("branch")
507 return branch and branches[branch].get("kmod_archive")
508
509 def IsKmodArchiveAndRsyncEnabled(step):
510 branch = step.getProperty("branch")
511 return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url"))
512
513 def GetBaseVersion(branch):
514 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
515 return branch.split('-')[1]
516 else:
517 return "master"
518
519 @properties.renderer
520 def GetVersionPrefix(props):
521 branch = props.getProperty("branch")
522 basever = GetBaseVersion(branch)
523 if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
524 return "%s/" % props["tag"][1:]
525 elif basever != "master":
526 return "%s-SNAPSHOT/" % basever
527 else:
528 return ""
529
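# illustrative results: GetBaseVersion('openwrt-21.02') -> '21.02' and
# GetBaseVersion('master') -> 'master'; GetVersionPrefix then yields
# '21.02.0/' when building the v21.02.0 tag, '21.02-SNAPSHOT/' for ordinary
# branch builds, and '' on master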
530 @util.renderer
531 def GetConfigSeed(props):
532 branch = props.getProperty("branch")
533 return branch and branches[branch].get("config_seed") or ""
534
535 @util.renderer
536 def GetRsyncParams(props, srcorbin, urlorkey):
537 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
538 branch = props.getProperty("branch")
539 opt = srcorbin + "_" + urlorkey
540 return branch and branches[branch].get(opt)
541
542 @util.renderer
543 def GetUsignKey(props):
544 branch = props.getProperty("branch")
545 return branch and branches[branch].get("usign_key")
546
547 def GetNextBuild(builder, requests):
548 for r in requests:
549 if r.properties:
550 # order tagged build first
551 if r.properties.hasProperty("tag"):
552 return r
553
554 r = requests[0]
555 #log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
556 return r
557
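# MakeEnv(tryccache=True) points CC/CXX at the ccache wrapper scripts
# (../ccache_cc.sh and ../ccache_cxx.sh, downloaded further below) and passes
# the detected ccache binary via CCACHE (empty when none was found, in which
# case the wrappers just exec the plain compiler); without tryccache the
# cc_command/cxx_command compilers are used directly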
558 def MakeEnv(overrides=None, tryccache=False):
559 env = {
560 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
561 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
562 }
563 if tryccache:
564 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
565 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
566 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
567 else:
568 env['CC'] = env['CCC']
569 env['CXX'] = env['CCXX']
570 env['CCACHE'] = ''
571 if overrides is not None:
572 env.update(overrides)
573 return env
574
575 @properties.renderer
576 def NetLockDl(props, extralock=None):
577 lock = None
578 if props.hasProperty("dl_lock"):
579 lock = NetLocks[props["dl_lock"]]
580 if lock is not None:
581 return [lock.access('exclusive')]
582 else:
583 return []
584
585 @properties.renderer
586 def NetLockUl(props):
587 lock = None
588 if props.hasProperty("ul_lock"):
589 lock = NetLocks[props["ul_lock"]]
590 if lock is not None:
591 return [lock.access('exclusive')]
592 else:
593 return []
594
595 def IsTargetSelected(target):
596 def CheckTargetProperty(step):
597 selected_target = step.getProperty("target", "all")
598 if selected_target != "all" and selected_target != target:
599 return False
600 return True
601
602 return CheckTargetProperty
603
604 @util.renderer
605 def UsignSec2Pub(props):
606 branch = props.getProperty("branch")
607 try:
608 comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
609 seckey = branches[branch].get("usign_key")
610 seckey = base64.b64decode(seckey)
611 except Exception:
612 return None
613
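# usign/signify secret key layout (after base64 decoding): 2-byte algorithm
# id, 2-byte KDF id, 4-byte KDF rounds, 16-byte salt, 8-byte checksum, 8-byte
# key id, then the 64-byte secret key whose last 32 bytes are the public key;
# the slices below reassemble algorithm id + key id + public key into the
# matching public key blob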
614 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
615 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
616
617
618 def canStartBuild(builder, wfb, request):
619 """ filter out non tag requests for tag_only workers. """
620 wtagonly = wfb.worker.properties.getProperty('tag_only')
621 tag = request.properties.getProperty('tag')
622
623 if wtagonly and not tag:
624 return False
625
626 return True
627
628 c['builders'] = []
629
630 workerNames = [ ]
631
632 for worker in c['workers']:
633 workerNames.append(worker.workername)
634
635 # add a single LocalWorker to handle the forcebuild builder
636 c['workers'].append(LocalWorker("__local_force_build", max_builds=1))
637
638 force_factory = BuildFactory()
639 force_factory.addStep(steps.Trigger(
640 name = "trigger_build",
641 schedulerNames = [ "trigger" ],
642 sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
643 set_properties = { "reason": Property("reason"), "tag": Property("tag"), "target": Property("target") },
644 ))
645
646 c['builders'].append(BuilderConfig(
647 name = "00_force_build",
648 workername = "__local_force_build",
649 factory = force_factory))
650
651
652 # NB the phase1 build factory assumes workers are single-build only
653 for target in targets:
654 ts = target.split('/')
655
656 factory = BuildFactory()
657
658 # setup shared work directory if required
659 factory.addStep(ShellCommand(
660 name = "sharedwd",
661 descriptionDone = "Shared work directory set up",
662 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
663 workdir = ".",
664 haltOnFailure = True,
665 ))
666
667 # find number of cores
668 factory.addStep(SetPropertyFromCommand(
669 name = "nproc",
670 property = "nproc",
671 description = "Finding number of CPUs",
672 command = ["nproc"],
673 ))
674
675 # find gcc and g++ compilers
676 factory.addStep(FileDownload(
677 name = "dlfindbinpl",
678 mastersrc = scripts_dir + '/findbin.pl',
679 workerdest = "../findbin.pl",
680 mode = 0o755,
681 ))
682
683 factory.addStep(SetPropertyFromCommand(
684 name = "gcc",
685 property = "cc_command",
686 description = "Finding gcc command",
687 command = ["../findbin.pl", "gcc", "", ""],
688 haltOnFailure = True,
689 ))
690
691 factory.addStep(SetPropertyFromCommand(
692 name = "g++",
693 property = "cxx_command",
694 description = "Finding g++ command",
695 command = ["../findbin.pl", "g++", "", ""],
696 haltOnFailure = True,
697 ))
698
699 # see if ccache is available
700 factory.addStep(SetPropertyFromCommand(
701 name = "ccache",
702 property = "ccache_command",
703 description = "Testing for ccache command",
704 command = ["which", "ccache"],
705 haltOnFailure = False,
706 flunkOnFailure = False,
707 warnOnFailure = False,
708 hideStepIf = lambda r, s: r==results.FAILURE,
709 ))
710
711 # check out the source
712 # Git() runs:
713 # if repo doesn't exist: 'git clone repourl'
714 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -f -f -d -x'; both only work with mode='full'
715 # git cat-file -e <commit>
716 # git checkout -f <commit>
717 # git checkout -B <branch>
718 # git rev-parse HEAD
719 factory.addStep(Git(
720 name = "git",
721 repourl = repo_url,
722 mode = 'full',
723 method = 'fresh',
724 locks = NetLockDl,
725 haltOnFailure = True,
726 ))
727
728 # workaround for https://github.com/openwrt/buildbot/issues/5
729 factory.addStep(Git(
730 name = "git me once more please",
731 repourl = repo_url,
732 mode = 'full',
733 method = 'fresh',
734 locks = NetLockDl,
735 haltOnFailure = True,
736 ))
737
738 # update remote refs
739 factory.addStep(ShellCommand(
740 name = "fetchrefs",
741 description = "Fetching Git remote refs",
742 descriptionDone = "Git remote refs fetched",
743 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
744 haltOnFailure = True,
745 ))
746
747 # getver.sh requires local branches to track upstream, otherwise version computation fails.
748 # Git() does not set tracking branches when cloning or switching, so work around this here
749 factory.addStep(ShellCommand(
750 name = "trackupstream",
751 description = "Setting upstream branch",
752 descriptionDone = "getver.sh is happy now",
753 command = ["git", "branch", "-u", Interpolate("origin/%(prop:branch)s")],
754 haltOnFailure = True,
755 ))
756
757 # Verify that Git HEAD points to a tag or branch
758 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
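# i.e. succeed if HEAD is a symbolic ref other than 'HEAD' (a branch is
# checked out) or, when detached, if the current commit matches a
# dereferenced release tag of the form vNN.*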
759 factory.addStep(ShellCommand(
760 name = "gitverify",
761 description = "Ensuring that Git HEAD is pointing to a branch or tag",
762 descriptionDone = "Git HEAD is sane",
763 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
764 haltOnFailure = True,
765 ))
766
767 factory.addStep(StringDownload(
768 name = "ccachecc",
769 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
770 workerdest = "../ccache_cc.sh",
771 mode = 0o755,
772 ))
773
774 factory.addStep(StringDownload(
775 name = "ccachecxx",
776 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
777 workerdest = "../ccache_cxx.sh",
778 mode = 0o755,
779 ))
780
781 # feed
782 factory.addStep(ShellCommand(
783 name = "updatefeeds",
784 description = "Updating feeds",
785 command=["./scripts/feeds", "update"],
786 env = MakeEnv(tryccache=True),
787 haltOnFailure = True,
788 locks = NetLockDl,
789 ))
790
791 # feed
792 factory.addStep(ShellCommand(
793 name = "installfeeds",
794 description = "Installing feeds",
795 command=["./scripts/feeds", "install", "-a"],
796 env = MakeEnv(tryccache=True),
797 haltOnFailure = True,
798 ))
799
800 # seed config
801 factory.addStep(StringDownload(
802 name = "dlconfigseed",
803 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
804 workerdest = ".config",
805 mode = 0o644,
806 ))
807
808 # configure
809 factory.addStep(ShellCommand(
810 name = "newconfig",
811 descriptionDone = ".config seeded",
812 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
813 ))
814
815 factory.addStep(ShellCommand(
816 name = "defconfig",
817 description = "Populating .config",
818 command = ["make", "defconfig"],
819 env = MakeEnv(),
820 ))
821
822 # check arch - exit early if it does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
823 factory.addStep(ShellCommand(
824 name = "checkarch",
825 description = "Checking architecture",
826 descriptionDone = "Architecture validated",
827 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
828 logEnviron = False,
829 want_stdout = False,
830 want_stderr = False,
831 haltOnFailure = True,
832 flunkOnFailure = False, # this is not a build FAILURE - TODO mark build as SKIPPED
833 ))
834
835 # find libc suffix
836 factory.addStep(SetPropertyFromCommand(
837 name = "libc",
838 property = "libc",
839 description = "Finding libc suffix",
840 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
841 ))
842
843 # install build key
844 factory.addStep(StringDownload(
845 name = "dlkeybuildpub",
846 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
847 workerdest = "key-build.pub",
848 mode = 0o600,
849 doStepIf = IsUsignEnabled,
850 ))
851
852 factory.addStep(StringDownload(
853 name = "dlkeybuild",
854 s = "# fake private key",
855 workerdest = "key-build",
856 mode = 0o600,
857 doStepIf = IsUsignEnabled,
858 ))
859
860 factory.addStep(StringDownload(
861 name = "dlkeybuilducert",
862 s = "# fake certificate",
863 workerdest = "key-build.ucert",
864 mode = 0o600,
865 doStepIf = IsUsignEnabled,
866 ))
867
868 # prepare dl
869 factory.addStep(ShellCommand(
870 name = "dldir",
871 description = "Preparing dl/",
872 descriptionDone = "dl/ prepared",
873 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
874 workdir = Property("builddir"),
875 logEnviron = False,
876 want_stdout = False,
877 ))
878
879 # cleanup dl
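# find operates on the dl/ symlink created above; -mindepth 1 keeps it from
# trying to -delete the 'dl/' starting point itself, which used to abort the
# step with "cannot delete 'dl/': Not a directory" (see commit subject)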
880 factory.addStep(ShellCommand(
881 name = "dlprune",
882 description = "Pruning dl/",
883 descriptionDone = "dl/ pruned",
884 command = 'find dl/ -mindepth 1 -atime +15 -delete -print',
885 logEnviron = False,
886 ))
887
888 # prepare tar
889 factory.addStep(ShellCommand(
890 name = "dltar",
891 description = "Building and installing GNU tar",
892 descriptionDone = "GNU tar built and installed",
893 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
894 env = MakeEnv(tryccache=True),
895 haltOnFailure = True,
896 ))
897
898 # populate dl
899 factory.addStep(ShellCommand(
900 name = "dlrun",
901 description = "Populating dl/",
902 descriptionDone = "dl/ populated",
903 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
904 env = MakeEnv(),
905 logEnviron = False,
906 locks = NetLockDl,
907 ))
908
909 factory.addStep(ShellCommand(
910 name = "cleanbase",
911 description = "Cleaning base-files",
912 command=["make", "package/base-files/clean", "V=s"],
913 ))
914
915 # build
916 factory.addStep(ShellCommand(
917 name = "tools",
918 description = "Building and installing tools",
919 descriptionDone = "Tools built and installed",
920 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
921 env = MakeEnv(tryccache=True),
922 haltOnFailure = True,
923 ))
924
925 factory.addStep(ShellCommand(
926 name = "toolchain",
927 description = "Building and installing toolchain",
928 descriptionDone = "Toolchain built and installed",
929 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
930 env = MakeEnv(),
931 haltOnFailure = True,
932 ))
933
934 factory.addStep(ShellCommand(
935 name = "kmods",
936 description = "Building kmods",
937 descriptionDone = "Kmods built",
938 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
939 env = MakeEnv(),
940 haltOnFailure = True,
941 ))
942
943 # find kernel version
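# 'make val.FOO' in the OpenWrt tree prints the value of variable FOO; the
# three values are joined as '<version>-<release>-<vermagic>', the same string
# used for the kmods/<kernelversion> directories below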
944 factory.addStep(SetPropertyFromCommand(
945 name = "kernelversion",
946 property = "kernelversion",
947 description = "Finding the effective Kernel version",
948 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
949 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
950 ))
951
952 factory.addStep(ShellCommand(
953 name = "pkgclean",
954 description = "Cleaning up package build",
955 descriptionDone = "Package build cleaned up",
956 command=["make", "package/cleanup", "V=s"],
957 ))
958
959 factory.addStep(ShellCommand(
960 name = "pkgbuild",
961 description = "Building packages",
962 descriptionDone = "Packages built",
963 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
964 env = MakeEnv(),
965 haltOnFailure = True,
966 ))
967
968 factory.addStep(ShellCommand(
969 name = "pkginstall",
970 description = "Installing packages",
971 descriptionDone = "Packages installed",
972 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
973 env = MakeEnv(),
974 haltOnFailure = True,
975 ))
976
977 factory.addStep(ShellCommand(
978 name = "pkgindex",
979 description = "Indexing packages",
980 descriptionDone = "Packages indexed",
981 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
982 env = MakeEnv(),
983 haltOnFailure = True,
984 ))
985
986 factory.addStep(ShellCommand(
987 name = "images",
988 description = "Building and installing images",
989 descriptionDone = "Images built and installed",
990 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
991 env = MakeEnv(),
992 haltOnFailure = True,
993 ))
994
995 factory.addStep(ShellCommand(
996 name = "buildinfo",
997 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
998 command = "make -j1 buildinfo V=s || true",
999 env = MakeEnv(),
1000 haltOnFailure = True,
1001 ))
1002
1003 factory.addStep(ShellCommand(
1004 name = "json_overview_image_info",
1005 description = "Generating profiles.json in target folder",
1006 command = "make -j1 json_overview_image_info V=s || true",
1007 env = MakeEnv(),
1008 haltOnFailure = True,
1009 ))
1010
1011 factory.addStep(ShellCommand(
1012 name = "checksums",
1013 description = "Calculating checksums",
1014 descriptionDone = "Checksums calculated",
1015 command=["make", "-j1", "checksum", "V=s"],
1016 env = MakeEnv(),
1017 haltOnFailure = True,
1018 ))
1019
1020 factory.addStep(ShellCommand(
1021 name = "kmoddir",
1022 descriptionDone = "Kmod directory created",
1023 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1024 haltOnFailure = True,
1025 doStepIf = IsKmodArchiveEnabled,
1026 ))
1027
1028 factory.addStep(ShellCommand(
1029 name = "kmodprepare",
1030 description = "Preparing kmod archive",
1031 descriptionDone = "Kmod archive prepared",
1032 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1033 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1034 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1035 haltOnFailure = True,
1036 doStepIf = IsKmodArchiveEnabled,
1037 ))
1038
1039 factory.addStep(ShellCommand(
1040 name = "kmodindex",
1041 description = "Indexing kmod archive",
1042 descriptionDone = "Kmod archive indexed",
1043 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1044 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1045 env = MakeEnv(),
1046 haltOnFailure = True,
1047 doStepIf = IsKmodArchiveEnabled,
1048 ))
1049
1050 # sign
1051 factory.addStep(MasterShellCommand(
1052 name = "signprepare",
1053 descriptionDone = "Temporary signing directory prepared",
1054 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1055 haltOnFailure = True,
1056 doStepIf = IsSignEnabled,
1057
1058 ))
1059
1060 factory.addStep(ShellCommand(
1061 name = "signpack",
1062 description = "Packing files to sign",
1063 descriptionDone = "Files to sign packed",
1064 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1065 haltOnFailure = True,
1066 doStepIf = IsSignEnabled,
1067 ))
1068
1069 factory.addStep(FileUpload(
1070 workersrc = "sign.tar.gz",
1071 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1072 haltOnFailure = True,
1073 doStepIf = IsSignEnabled,
1074 ))
1075
1076 factory.addStep(MasterShellCommand(
1077 name = "signfiles",
1078 description = "Signing files",
1079 descriptionDone = "Files signed",
1080 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
1081 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1082 haltOnFailure = True,
1083 doStepIf = IsSignEnabled,
1084 ))
1085
1086 factory.addStep(FileDownload(
1087 name = "dlsigntargz",
1088 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1089 workerdest = "sign.tar.gz",
1090 haltOnFailure = True,
1091 doStepIf = IsSignEnabled,
1092 ))
1093
1094 factory.addStep(ShellCommand(
1095 name = "signunpack",
1096 description = "Unpacking signed files",
1097 descriptionDone = "Signed files unpacked",
1098 command = ["tar", "-xzf", "sign.tar.gz"],
1099 haltOnFailure = True,
1100 doStepIf = IsSignEnabled,
1101 ))
1102
1103 # upload
1104 factory.addStep(ShellCommand(
1105 name = "dirprepare",
1106 descriptionDone = "Upload directory structure prepared",
1107 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1108 haltOnFailure = True,
1109 ))
1110
1111 factory.addStep(ShellCommand(
1112 name = "linkprepare",
1113 descriptionDone = "Repository symlink prepared",
1114 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1115 doStepIf = IsNoMasterBuild,
1116 haltOnFailure = True,
1117 ))
1118
1119 factory.addStep(ShellCommand(
1120 name = "kmoddirprepare",
1121 descriptionDone = "Kmod archive upload directory prepared",
1122 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1123 haltOnFailure = True,
1124 doStepIf = IsKmodArchiveEnabled,
1125 ))
1126
1127 factory.addStep(ShellCommand(
1128 name = "dirupload",
1129 description = "Uploading directory structure",
1130 descriptionDone = "Directory structure uploaded",
1131 command = ["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")] + rsync_defopts + ["tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1132 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1133 haltOnFailure = True,
1134 logEnviron = False,
1135 locks = NetLockUl,
1136 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1137 ))
1138
1139 # download remote sha256sums to 'target-sha256sums'
1140 factory.addStep(ShellCommand(
1141 name = "target-sha256sums",
1142 description = "Fetching remote sha256sums for target",
1143 descriptionDone = "Remote sha256sums for target fetched",
1144 command = ["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")] + rsync_defopts + [Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1145 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1146 logEnviron = False,
1147 haltOnFailure = False,
1148 flunkOnFailure = False,
1149 warnOnFailure = False,
1150 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1151 ))
1152
1153 # build list of files to upload
1154 factory.addStep(FileDownload(
1155 name = "dlsha2rsyncpl",
1156 mastersrc = scripts_dir + '/sha2rsync.pl',
1157 workerdest = "../sha2rsync.pl",
1158 mode = 0o755,
1159 ))
1160
1161 factory.addStep(ShellCommand(
1162 name = "buildlist",
1163 description = "Building list of files to upload",
1164 descriptionDone = "List of files to upload built",
1165 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1166 haltOnFailure = True,
1167 ))
1168
1169 factory.addStep(FileDownload(
1170 name = "dlrsync.sh",
1171 mastersrc = scripts_dir + '/rsync.sh',
1172 workerdest = "../rsync.sh",
1173 mode = 0o755,
1174 ))
1175
1176 # upload new files and update existing ones
1177 factory.addStep(ShellCommand(
1178 name = "targetupload",
1179 description = "Uploading target files",
1180 descriptionDone = "Target files uploaded",
1181 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1182 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1183 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1184 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1185 haltOnFailure = True,
1186 logEnviron = False,
1187 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1188 ))
1189
1190 # delete files which don't exist locally
1191 factory.addStep(ShellCommand(
1192 name = "targetprune",
1193 description = "Pruning target files",
1194 descriptionDone = "Target files pruned",
1195 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1196 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1197 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1198 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1199 haltOnFailure = True,
1200 logEnviron = False,
1201 locks = NetLockUl,
1202 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1203 ))
1204
1205 factory.addStep(ShellCommand(
1206 name = "kmodupload",
1207 description = "Uploading kmod archive",
1208 descriptionDone = "Kmod archive uploaded",
1209 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1210 [Interpolate("-a%(prop:rsync_ipv4:+4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1211 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1212 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1213 haltOnFailure = True,
1214 logEnviron = False,
1215 locks = NetLockUl,
1216 doStepIf = IsKmodArchiveAndRsyncEnabled,
1217 ))
1218
1219 factory.addStep(ShellCommand(
1220 name = "sourcelist",
1221 description = "Finding source archives to upload",
1222 descriptionDone = "Source archives to upload found",
1223 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1224 haltOnFailure = True,
1225 ))
1226
1227 factory.addStep(ShellCommand(
1228 name = "sourceupload",
1229 description = "Uploading source archives",
1230 descriptionDone = "Source archives uploaded",
1231 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1232 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), Interpolate("-a%(prop:rsync_ipv4:+4)s"), "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1233 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1234 haltOnFailure = True,
1235 logEnviron = False,
1236 locks = NetLockUl,
1237 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1238 ))
1239
1240 factory.addStep(ShellCommand(
1241 name = "df",
1242 description = "Reporting disk usage",
1243 command=["df", "-h", "."],
1244 env={'LC_ALL': 'C'},
1245 logEnviron = False,
1246 haltOnFailure = False,
1247 flunkOnFailure = False,
1248 warnOnFailure = False,
1249 alwaysRun = True,
1250 ))
1251
1252 factory.addStep(ShellCommand(
1253 name = "du",
1254 description = "Reporting estimated file space usage",
1255 command=["du", "-sh", "."],
1256 env={'LC_ALL': 'C'},
1257 logEnviron = False,
1258 haltOnFailure = False,
1259 flunkOnFailure = False,
1260 warnOnFailure = False,
1261 alwaysRun = True,
1262 ))
1263
1264 factory.addStep(ShellCommand(
1265 name = "ccachestat",
1266 description = "Reporting ccache stats",
1267 command=["ccache", "-s"],
1268 logEnviron = False,
1269 want_stderr = False,
1270 haltOnFailure = False,
1271 flunkOnFailure = False,
1272 warnOnFailure = False,
1273 doStepIf = util.Transform(bool, Property("ccache_command")),
1274 ))
1275
1276 for brname in branchNames:
1277 bldrname = brname + "_" + target
1278 c['builders'].append(BuilderConfig(name=bldrname, workernames=workerNames, factory=factory, tags=[brname,], nextBuild=GetNextBuild, canStartBuild=canStartBuild))
1279
1280
1281 ####### STATUS TARGETS
1282
1283 # Status targets: the results of each build are pushed to the targets
1284 # configured below - the 'www' web UI and reporter services such as the
1285 # IRC bot.
1286
1287 if "status_bind" in inip1:
1288 c['www'] = {
1289 'port': inip1.get("status_bind"),
1290 'plugins': {
1291 'waterfall_view': True,
1292 'console_view': True,
1293 'grid_view': True
1294 }
1295 }
1296
1297 if "status_user" in inip1 and "status_password" in inip1:
1298 c['www']['auth'] = util.UserPasswordAuth([
1299 (inip1.get("status_user"), inip1.get("status_password"))
1300 ])
1301 c['www']['authz'] = util.Authz(
1302 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1303 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1304 )
1305
1306 c['services'] = []
1307 if ini.has_section("irc"):
1308 iniirc = ini['irc']
1309 irc_host = iniirc.get("host", None)
1310 irc_port = iniirc.getint("port", 6667)
1311 irc_chan = iniirc.get("channel", None)
1312 irc_nick = iniirc.get("nickname", None)
1313 irc_pass = iniirc.get("password", None)
1314
1315 if irc_host and irc_nick and irc_chan:
1316 irc = reporters.IRC(irc_host, irc_nick,
1317 port = irc_port,
1318 password = irc_pass,
1319 channels = [ irc_chan ],
1320 notify_events = [ 'exception', 'problem', 'recovery' ]
1321 )
1322
1323 c['services'].append(irc)
1324
1325 c['revlink'] = util.RevlinkMatch([
1326 r'https://git.openwrt.org/openwrt/(.*).git'
1327 ],
1328 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1329
1330 ####### DB URL
1331
1332 c['db'] = {
1333 # This specifies what database buildbot uses to store its state. You can leave
1334 # this at its default for all but the largest installations.
1335 'db_url' : "sqlite:///state.sqlite",
1336 }
1337
1338 c['buildbotNetUsageData'] = None