phase1: remove checkBuiltin deadcode
[buildbot.git] / phase1 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
42
43
# Drop a pid file on first start so the usual twistd tooling can find
# this process even when it was not launched through twistd itself.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
47
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# All three sections are mandatory for a phase1 master.
if any(section not in ini for section in ("general", "phase1", "rsync")):
    raise ValueError("Fix your configuration")

inip1 = ini['phase1']

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
62
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = inip1.get("buildbot_url")

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

c['workers'] = []
# Master-side network locks keyed by lock name; workers that declare the
# same dl_lock/ul_lock option share one lock instance.
NetLocks = dict()
88
# Register every "worker <name>" section that carries credentials and
# either states phase = 1 or no phase at all. Per-worker build options
# are passed along as worker properties (sl_props).
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            max_builds = 1
            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                # a single-build worker can safely reuse one shared workdir
                if max_builds == 1:
                    sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUG FIX: this used to read the "dl_lock" option here
                # (copy/paste error), so a distinct upload lock name was
                # silently ignored.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
120
# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    # keep step logs for three days, pruned once a day at 06:00
    logHorizon=timedelta(days=3),
    hour=6,
)]
133
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    builder_id = yield bldr.getBuilderId()

    # Newest completed, non-skipped build request for this builder.
    requests = yield bldr.master.data.get(
        ('builders', builder_id, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not requests:
        return

    newest = requests[0]['complete_at']

    # The most recently started build may have completed even later.
    builds = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [builder_id]),
        ],
        order=['-started_at'], limit=1)

    if builds and builds[0]:
        build_complete_at = builds[0]['complete_at']
        if build_complete_at and (build_complete_at > newest):
            return build_complete_at

    return newest
171
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # busy either with current or older (pre-reconfig) builds
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # never-built builders sort first (oldest possible timestamp) ...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ... and currently-busy builders sort last, so idle ones win
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # NOTE: renamed from "results" -- the old local name shadowed the
    # imported buildbot.process.results module used elsewhere in this file.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for info in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(info[1].name, info[0]))

    return [info[1] for info in infos]
208
c['prioritizeBuilders'] = prioritizeBuilders

####### CHANGESOURCES

# Local checkout and helper-script locations on the master.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

# Days after which a worker's build tree is expired (0 disables expiry).
tree_expire = inip1.getint("expire", 0)
# Initial contents seeded into the workers' .config files.
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# rsync-protocol destinations ("module::" or "rsync://") also accept a
# connection timeout option.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]

# Optional usign package-signing key; the default comment is derived from
# the branch name (e.g. "openwrt-21.02" -> "Openwrt 21.02 key").
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

# When enabled, kernel module packages are archived per kernel version.
enable_kmod_archive = inip1.getboolean("kmod_archive", False)
244
245
# find targets
targets = [ ]

# Keep a shallow clone of the source tree on the master and enumerate all
# buildable <target>/<subtarget> combinations from it.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# One target per line; the first whitespace-separated field is its name.
for line in findtargets.stdout:
    targets.append(line.decode().strip().split(' ')[0])
264
265
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
# Poll the source repository every 5 minutes for new commits on our branch.
c['change_source'].append(GitPoller(
    repo_url,
    workdir=work_dir+'/work.git', branch=repo_branch,
    pollinterval=300))
274
275 ####### SCHEDULERS
276
277 # Configure the Schedulers, which decide how to react to incoming changes. In this
278 # case, just kick off a 'basebuild' build
279
class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter listing the release tags of this branch.

    The choice list is recomputed from `git ls-remote` on every access;
    the empty string (= build branch HEAD) is always the first choice.
    """
    spec_attributes = ["strict", "choices"]
    type = "list"
    strict = True

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        self._choice_list = []

    @property
    def choices(self):
        tags = []
        # only release branches ("...-<major>.<minor>") carry tags to offer
        basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

        if basever:
            lsremote = subprocess.Popen(
                ['git', 'ls-remote', '--tags', repo_url],
                stdout = subprocess.PIPE)

            # keep the vX.Y.Z[-rcN] tags belonging to this release series
            for rawline in lsremote.stdout:
                tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', rawline.decode().strip())
                if tagver and tagver[1].find(basever[1]) == 0:
                    tags.append(tagver[1])

        # Newest first; appending "-z" to final releases makes them sort
        # above their own -rc prereleases.
        tags.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
        tags.insert(0, '')

        self._choice_list = tags

        return self._choice_list

    def parse_from_arg(self, s):
        # reject anything not present in the most recently computed choices
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
        return s
321
c['schedulers'] = []
# Automatic scheduler: build every target once the branch has been quiet
# for 60 seconds.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))

# Manual scheduler: the "00_force_build" pseudo-builder exposes a form to
# force-build a single target (or all of them) at an optional release tag.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            # all codebase fields are fixed; the repo comes from config.ini
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        # collected into the "options" property as a dict; consumed by
        # IsTargetSelected() and TagPropertyValue() below
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "target",
                    label = "Build target",
                    default = "all",
                    choices = [ "all" ] + targets
                ),
                TagChoiceParameter(
                    name = "tag",
                    label = "Build tag",
                    default = ""
                )
            ]
        )
    ]
))
375
####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

# Maps user-selectable "clean" property values to the make target that
# implements them (roughly ordered from cheapest to most thorough).
CleanTargetMap = [
    [ "tools", "tools/clean" ],
    [ "chain", "toolchain/clean" ],
    [ "linux", "target/linux/clean" ],
    [ "dir", "dirclean" ],
    [ "dist", "distclean" ]
]
389
def IsMakeCleanRequested(pattern):
    """Return a doStepIf predicate that is true when the build's "clean"
    property matches *pattern*."""
    def _check(step):
        requested = step.getProperty("clean")
        return bool(requested and re.match(pattern, requested))

    return _check
399
def IsSharedWorkdir(step):
    # doStepIf helper: the worker keeps one shared build tree for all builders.
    return bool(step.getProperty("shared_wd"))
402
def IsCleanupRequested(step):
    """True when the worker asked for cleanup.sh runs; never on shared
    workdirs (those must not be wiped)."""
    if IsSharedWorkdir(step):
        return False
    return bool(step.getProperty("do_cleanup"))
411
def IsExpireRequested(step):
    """True when the age-based expire.sh check should run: only on
    non-shared workdirs that do not use explicit cleanup."""
    if IsSharedWorkdir(step):
        return False
    return not IsCleanupRequested(step)
417
def IsGitFreshRequested(step):
    """True when the checkout should use Git's "fresh" method (workers
    configured with cleanup = yes)."""
    return bool(step.getProperty("do_cleanup"))
424
def IsGitCleanRequested(step):
    # Use the "clean" checkout method whenever "fresh" was not requested.
    return not IsGitFreshRequested(step)
427
def IsTaggingRequested(step):
    """True when the forced build carries a valid release tag (x.y.z or
    x.y.z-rcN)."""
    tag = step.getProperty("tag")
    return bool(tag and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", tag))
434
def IsNoTaggingRequested(step):
    # Inverse of IsTaggingRequested, for steps that only run on untagged builds.
    return not IsTaggingRequested(step)
437
def IsNoMasterBuild(step):
    # True on release branches; some steps are skipped for master builds.
    return repo_branch != "master"
440
def GetBaseVersion():
    """Return "x.y" from a release branch name like "openwrt-21.02",
    or "master" when the branch does not follow that pattern."""
    is_release = re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch)
    return repo_branch.split('-')[1] if is_release else "master"
446
@properties.renderer
def GetVersionPrefix(props):
    """Upload directory prefix: "<tag>/" for tagged builds,
    "<x.y>-SNAPSHOT/" on release branches, "" on master."""
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    if basever != "master":
        return "%s-SNAPSHOT/" % basever
    return ""
456
@properties.renderer
def GetNumJobs(props):
    """make -jN value: the worker's CPU count divided by its concurrent
    build slots; "1" until both properties are known."""
    if props.hasProperty("max_builds") and props.hasProperty("nproc"):
        return str(int(props["nproc"]) // props["max_builds"])
    return "1"
463
@properties.renderer
def GetCC(props):
    """C compiler command discovered on the worker (fallback: gcc)."""
    return props["cc_command"] if props.hasProperty("cc_command") else "gcc"
470
@properties.renderer
def GetCXX(props):
    """C++ compiler command discovered on the worker (fallback: g++)."""
    return props["cxx_command"] if props.hasProperty("cxx_command") else "g++"
477
@properties.renderer
def GetCwd(props):
    """Best-known build location: builddir, else workdir, else "/"."""
    for prop in ("builddir", "workdir"):
        if props.hasProperty(prop):
            return props[prop]
    return "/"
486
@properties.renderer
def GetCCache(props):
    """The ccache command when available on the worker, else ""."""
    if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
        return props["ccache_command"]
    return ""
493
def GetNextBuild(builder, requests):
    """Pick the next build request: tagged (release) requests win over
    ordinary ones; otherwise take the oldest request."""
    for request in requests:
        if request.properties and request.properties.hasProperty("tag"):
            return request

    request = requests[0]
    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, request.id, request.bsid))
    return request
502
def MakeEnv(overrides=None, tryccache=False):
    """Environment dict shared by the make/feeds steps.

    CCC/CCXX always name the compilers found on the worker. With
    tryccache, CC/CXX point at the ccache wrapper scripts; otherwise they
    simply alias CCC/CCXX. Entries in *overrides* win over everything.
    """
    env = {
        'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
        'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
    }
    if tryccache:
        env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
        env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
        env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
    else:
        env['CC'], env['CXX'], env['CCACHE'] = env['CCC'], env['CCXX'], ''
    if overrides is not None:
        env.update(overrides)
    return env
519
@properties.renderer
def NetLockDl(props):
    """Download-side network lock for this worker, if one was configured."""
    if props.hasProperty("dl_lock"):
        return [NetLocks[props["dl_lock"]].access('exclusive')]
    return []
529
@properties.renderer
def NetLockUl(props):
    """Upload-side network lock for this worker, if one was configured."""
    if props.hasProperty("ul_lock"):
        return [NetLocks[props["ul_lock"]].access('exclusive')]
    return []
539
@util.renderer
def TagPropertyValue(props):
    """Extract the "tag" entry from the force-build "options" dict, or None."""
    if props.hasProperty("options"):
        options = props.getProperty("options")
        if type(options) is dict:
            return options.get("tag")
    return None
547
def IsTargetSelected(target):
    """doStepIf factory: true unless the user explicitly picked a
    different build target in the force form."""
    def _check(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                chosen = options.get("target", "all")
                if chosen not in ("all", target):
                    return False
        except KeyError:
            pass

        return True

    return _check
562
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64-encoded secret key.

    The public material is assembled from the algorithm tag (bytes 0-1),
    the key id (bytes 32-39) and the trailing public part of the decoded
    secret key, re-encoded as base64 below a comment line where "secret
    key" is replaced by "public key". Returns None for invalid input.
    """
    try:
        rawkey = base64.b64decode(seckey)
    except (TypeError, ValueError):
        # not a (valid) base64 string -- signing is simply skipped
        return None

    # BUG FIX: b64encode() returns bytes; without .decode() the formatted
    # string embedded a literal b'...' and produced an invalid key file.
    pubkey = base64.b64encode(rawkey[0:2] + rawkey[32:40] + rawkey[72:]).decode('ascii')
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubkey)
571
572
c['builders'] = []

# Serializes dl/ population so parallel builds don't fight over the cache.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [worker.workername for worker in c['workers']]

# The force scheduler needs a pseudo-builder to attach to; it runs no steps.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
588
589 for target in targets:
590 ts = target.split('/')
591
592 factory = BuildFactory()
593
594 # setup shared work directory if required
595 factory.addStep(ShellCommand(
596 name = "sharedwd",
597 description = "Setting up shared work directory",
598 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
599 workdir = ".",
600 haltOnFailure = True,
601 doStepIf = IsSharedWorkdir))
602
603 # find number of cores
604 factory.addStep(SetPropertyFromCommand(
605 name = "nproc",
606 property = "nproc",
607 description = "Finding number of CPUs",
608 command = ["nproc"]))
609
610 # find gcc and g++ compilers
611 factory.addStep(FileDownload(
612 name = "dlfindbinpl",
613 mastersrc = scripts_dir + '/findbin.pl',
614 workerdest = "../findbin.pl",
615 mode = 0o755))
616
617 factory.addStep(SetPropertyFromCommand(
618 name = "gcc",
619 property = "cc_command",
620 description = "Finding gcc command",
621 command = [
622 "../findbin.pl", "gcc", "", "",
623 ],
624 haltOnFailure = True))
625
626 factory.addStep(SetPropertyFromCommand(
627 name = "g++",
628 property = "cxx_command",
629 description = "Finding g++ command",
630 command = [
631 "../findbin.pl", "g++", "", "",
632 ],
633 haltOnFailure = True))
634
635 # see if ccache is available
636 factory.addStep(SetPropertyFromCommand(
637 property = "ccache_command",
638 command = ["which", "ccache"],
639 description = "Testing for ccache command",
640 haltOnFailure = False,
641 flunkOnFailure = False,
642 warnOnFailure = False,
643 ))
644
645 # expire tree if needed
646 if tree_expire > 0:
647 factory.addStep(FileDownload(
648 name = "dlexpiresh",
649 doStepIf = IsExpireRequested,
650 mastersrc = scripts_dir + '/expire.sh',
651 workerdest = "../expire.sh",
652 mode = 0o755))
653
654 factory.addStep(ShellCommand(
655 name = "expire",
656 description = "Checking for build tree expiry",
657 command = ["./expire.sh", str(tree_expire)],
658 workdir = ".",
659 haltOnFailure = True,
660 doStepIf = IsExpireRequested,
661 timeout = 2400))
662
663 # cleanup.sh if needed
664 factory.addStep(FileDownload(
665 name = "dlcleanupsh",
666 mastersrc = scripts_dir + '/cleanup.sh',
667 workerdest = "../cleanup.sh",
668 mode = 0o755,
669 doStepIf = IsCleanupRequested))
670
671 factory.addStep(ShellCommand(
672 name = "cleanold",
673 description = "Cleaning previous builds",
674 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
675 workdir = ".",
676 haltOnFailure = True,
677 doStepIf = IsCleanupRequested,
678 timeout = 2400))
679
680 factory.addStep(ShellCommand(
681 name = "cleanup",
682 description = "Cleaning work area",
683 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
684 workdir = ".",
685 haltOnFailure = True,
686 doStepIf = IsCleanupRequested,
687 timeout = 2400))
688
689 # user-requested clean targets
690 for tuple in CleanTargetMap:
691 factory.addStep(ShellCommand(
692 name = tuple[1],
693 description = 'User-requested "make %s"' % tuple[1],
694 command = ["make", tuple[1], "V=s"],
695 env = MakeEnv(),
696 doStepIf = IsMakeCleanRequested(tuple[0])
697 ))
698
699 # Workaround bug when switching from a checked out tag back to a branch
700 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
701 factory.addStep(ShellCommand(
702 name = "gitcheckout",
703 description = "Ensure that Git HEAD is sane",
704 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
705 haltOnFailure = True))
706
707 # check out the source
708 # Git() runs:
709 # if repo doesn't exist: 'git clone repourl'
710 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
711 # 'git fetch -t repourl branch; git reset --hard revision'
712 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
713 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
714 factory.addStep(Git(
715 name = "gitclean",
716 repourl = repo_url,
717 branch = repo_branch,
718 mode = 'full',
719 method = 'clean',
720 locks = NetLockDl,
721 haltOnFailure = True,
722 doStepIf = IsGitCleanRequested,
723 ))
724
725 factory.addStep(Git(
726 name = "gitfresh",
727 repourl = repo_url,
728 branch = repo_branch,
729 mode = 'full',
730 method = 'fresh',
731 locks = NetLockDl,
732 haltOnFailure = True,
733 doStepIf = IsGitFreshRequested,
734 ))
735
736 # update remote refs
737 factory.addStep(ShellCommand(
738 name = "fetchrefs",
739 description = "Fetching Git remote refs",
740 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
741 haltOnFailure = True
742 ))
743
744 # switch to tag
745 factory.addStep(ShellCommand(
746 name = "switchtag",
747 description = "Checking out Git tag",
748 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
749 haltOnFailure = True,
750 doStepIf = IsTaggingRequested
751 ))
752
753 # Verify that Git HEAD points to a tag or branch
754 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
755 factory.addStep(ShellCommand(
756 name = "gitverify",
757 description = "Ensure that Git HEAD is pointing to a branch or tag",
758 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
759 haltOnFailure = True))
760
761 factory.addStep(ShellCommand(
762 name = "rmtmp",
763 description = "Remove tmp folder",
764 command=["rm", "-rf", "tmp/"]))
765
766 # feed
767 # factory.addStep(ShellCommand(
768 # name = "feedsconf",
769 # description = "Copy the feeds.conf",
770 # command='''cp ~/feeds.conf ./feeds.conf''' ))
771
772 # feed
773 factory.addStep(ShellCommand(
774 name = "rmfeedlinks",
775 description = "Remove feed symlinks",
776 command=["rm", "-rf", "package/feeds/"]))
777
778 factory.addStep(StringDownload(
779 name = "ccachecc",
780 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
781 workerdest = "../ccache_cc.sh",
782 mode = 0o755,
783 ))
784
785 factory.addStep(StringDownload(
786 name = "ccachecxx",
787 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
788 workerdest = "../ccache_cxx.sh",
789 mode = 0o755,
790 ))
791
792 # feed
793 factory.addStep(ShellCommand(
794 name = "updatefeeds",
795 description = "Updating feeds",
796 command=["./scripts/feeds", "update"],
797 env = MakeEnv(tryccache=True),
798 haltOnFailure = True,
799 locks = NetLockDl,
800 ))
801
802 # feed
803 factory.addStep(ShellCommand(
804 name = "installfeeds",
805 description = "Installing feeds",
806 command=["./scripts/feeds", "install", "-a"],
807 env = MakeEnv(tryccache=True),
808 haltOnFailure = True
809 ))
810
811 # seed config
812 if config_seed is not None:
813 factory.addStep(StringDownload(
814 name = "dlconfigseed",
815 s = config_seed + '\n',
816 workerdest = ".config",
817 mode = 0o644
818 ))
819
820 # configure
821 factory.addStep(ShellCommand(
822 name = "newconfig",
823 description = "Seeding .config",
824 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
825 ))
826
827 factory.addStep(ShellCommand(
828 name = "delbin",
829 description = "Removing output directory",
830 command = ["rm", "-rf", "bin/"]
831 ))
832
833 factory.addStep(ShellCommand(
834 name = "defconfig",
835 description = "Populating .config",
836 command = ["make", "defconfig"],
837 env = MakeEnv()
838 ))
839
840 # check arch
841 factory.addStep(ShellCommand(
842 name = "checkarch",
843 description = "Checking architecture",
844 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
845 logEnviron = False,
846 want_stdout = False,
847 want_stderr = False,
848 haltOnFailure = True
849 ))
850
851 # find libc suffix
852 factory.addStep(SetPropertyFromCommand(
853 name = "libc",
854 property = "libc",
855 description = "Finding libc suffix",
856 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
857
858 # install build key
859 if usign_key is not None:
860 factory.addStep(StringDownload(
861 name = "dlkeybuildpub",
862 s = UsignSec2Pub(usign_key, usign_comment),
863 workerdest = "key-build.pub",
864 mode = 0o600,
865 ))
866
867 factory.addStep(StringDownload(
868 name = "dlkeybuild",
869 s = "# fake private key",
870 workerdest = "key-build",
871 mode = 0o600,
872 ))
873
874 factory.addStep(StringDownload(
875 name = "dlkeybuilducert",
876 s = "# fake certificate",
877 workerdest = "key-build.ucert",
878 mode = 0o600,
879 ))
880
881 # prepare dl
882 factory.addStep(ShellCommand(
883 name = "dldir",
884 description = "Preparing dl/",
885 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
886 logEnviron = False,
887 want_stdout = False
888 ))
889
890 # prepare tar
891 factory.addStep(ShellCommand(
892 name = "dltar",
893 description = "Building and installing GNU tar",
894 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
895 env = MakeEnv(tryccache=True),
896 haltOnFailure = True
897 ))
898
899 # populate dl
900 factory.addStep(ShellCommand(
901 name = "dlrun",
902 description = "Populating dl/",
903 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
904 env = MakeEnv(),
905 logEnviron = False,
906 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
907 ))
908
909 factory.addStep(ShellCommand(
910 name = "cleanbase",
911 description = "Cleaning base-files",
912 command=["make", "package/base-files/clean", "V=s"]
913 ))
914
915 # build
916 factory.addStep(ShellCommand(
917 name = "tools",
918 description = "Building and installing tools",
919 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
920 env = MakeEnv(tryccache=True),
921 haltOnFailure = True
922 ))
923
924 factory.addStep(ShellCommand(
925 name = "toolchain",
926 description = "Building and installing toolchain",
927 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
928 env = MakeEnv(),
929 haltOnFailure = True
930 ))
931
932 factory.addStep(ShellCommand(
933 name = "kmods",
934 description = "Building kmods",
935 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
936 env = MakeEnv(),
937 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
938 haltOnFailure = True
939 ))
940
941 # find kernel version
942 factory.addStep(SetPropertyFromCommand(
943 name = "kernelversion",
944 property = "kernelversion",
945 description = "Finding the effective Kernel version",
946 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
947 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
948 ))
949
950 factory.addStep(ShellCommand(
951 name = "pkgclean",
952 description = "Cleaning up package build",
953 command=["make", "package/cleanup", "V=s"]
954 ))
955
956 factory.addStep(ShellCommand(
957 name = "pkgbuild",
958 description = "Building packages",
959 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
960 env = MakeEnv(),
961 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
962 haltOnFailure = True
963 ))
964
965 factory.addStep(ShellCommand(
966 name = "pkginstall",
967 description = "Installing packages",
968 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
969 env = MakeEnv(),
970 haltOnFailure = True
971 ))
972
973 factory.addStep(ShellCommand(
974 name = "pkgindex",
975 description = "Indexing packages",
976 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
977 env = MakeEnv(),
978 haltOnFailure = True
979 ))
980
981 factory.addStep(ShellCommand(
982 name = "images",
983 description = "Building and installing images",
984 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
985 env = MakeEnv(),
986 haltOnFailure = True
987 ))
988
989 factory.addStep(ShellCommand(
990 name = "buildinfo",
991 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
992 command = "make -j1 buildinfo V=s || true",
993 env = MakeEnv(),
994 haltOnFailure = True
995 ))
996
997 factory.addStep(ShellCommand(
998 name = "json_overview_image_info",
999 description = "Generate profiles.json in target folder",
1000 command = "make -j1 json_overview_image_info V=s || true",
1001 env = MakeEnv(),
1002 haltOnFailure = True
1003 ))
1004
1005 factory.addStep(ShellCommand(
1006 name = "checksums",
1007 description = "Calculating checksums",
1008 command=["make", "-j1", "checksum", "V=s"],
1009 env = MakeEnv(),
1010 haltOnFailure = True
1011 ))
1012
# Optional kmod archive: copy the kernel-module packages into a
# per-kernel-version subdirectory (kmods/<kernelversion>/) and index it,
# so modules stay installable for a given kernel after the main package
# directory moves on.
if enable_kmod_archive:
    factory.addStep(ShellCommand(
        name = "kmoddir",
        description = "Creating kmod directory",
        command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True
    ))

    # rsync with --include=/kmod-*.ipk --exclude=* copies only the
    # kmod-*.ipk packages into the archive directory.
    factory.addStep(ShellCommand(
        name = "kmodprepare",
        description = "Preparing kmod archive",
        command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
                 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
                 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        haltOnFailure = True
    ))

    # Re-run the package indexer restricted (via PACKAGE_SUBDIRS) to the
    # kmod archive subdirectory; signing is disabled here just like for the
    # main index (CONFIG_SIGNED_PACKAGES=) — signing happens on the master.
    factory.addStep(ShellCommand(
        name = "kmodindex",
        description = "Indexing kmod archive",
        command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
                 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
        env = MakeEnv(),
        haltOnFailure = True
    ))
1038
# sign
# Signing pipeline (only when a gpg key or usign key is configured):
#   1. worker packs all sha256sums/Packages index files into sign.tar.gz
#   2. tarball is uploaded to the master
#   3. master signs it via scripts/signall.sh (keys come from config.ini)
#   4. signed tarball is downloaded back and unpacked in place on the worker
if ini.has_option("gpg", "key") or usign_key is not None:
    # Master-side scratch directory for per-target signing tarballs.
    factory.addStep(MasterShellCommand(
        name = "signprepare",
        description = "Preparing temporary signing directory",
        command = ["mkdir", "-p", "%s/signing" %(work_dir)],
        haltOnFailure = True
    ))

    # Collect only the index/checksum files (sha256sums, Packages) from the
    # target dir and the kmods archive, NUL-delimited for xargs safety.
    factory.addStep(ShellCommand(
        name = "signpack",
        description = "Packing files to sign",
        command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
        haltOnFailure = True
    ))

    factory.addStep(FileUpload(
        workersrc = "sign.tar.gz",
        masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        haltOnFailure = True
    ))

    # signall.sh reads its key material from the buildmaster config file
    # passed via CONFIG_INI.
    factory.addStep(MasterShellCommand(
        name = "signfiles",
        description = "Signing files",
        command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
        env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
        haltOnFailure = True
    ))

    factory.addStep(FileDownload(
        name = "dlsigntargz",
        mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
        workerdest = "sign.tar.gz",
        haltOnFailure = True
    ))

    # Overwrites the unsigned index files with their signed versions.
    factory.addStep(ShellCommand(
        name = "signunpack",
        description = "Unpacking signed files",
        command = ["tar", "-xzf", "sign.tar.gz"],
        haltOnFailure = True
    ))
1082
# upload
# Build the destination layout under tmp/upload/ locally, then rsync the
# skeleton to the download server so the later per-file uploads find their
# destination directories already present.
factory.addStep(ShellCommand(
    name = "dirprepare",
    description = "Preparing upload directory structure",
    command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    haltOnFailure = True
))

# Non-master builds get a "packages" symlink pointing at the shared
# packages-<basever> feed directory (gated by IsNoMasterBuild).
factory.addStep(ShellCommand(
    name = "linkprepare",
    description = "Preparing repository symlink",
    command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
    doStepIf = IsNoMasterBuild,
    haltOnFailure = True
))

if enable_kmod_archive:
    factory.addStep(ShellCommand(
        name = "kmoddirprepare",
        description = "Preparing kmod archive upload directory",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True
    ))

factory.addStep(ShellCommand(
    name = "dirupload",
    description = "Uploading directory structure",
    command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,  # keep RSYNC_PASSWORD out of the step log
    locks = NetLockUl,   # NetLockUl presumably throttles concurrent uploads — defined earlier in this file
))
1116
# download remote sha256sums to 'target-sha256sums'
# Best-effort fetch: on a first build for a target the remote file does not
# exist yet, so failure here must not fail or flunk the build.
factory.addStep(ShellCommand(
    name = "target-sha256sums",
    description = "Fetching remote sha256sums for target",
    command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    logEnviron = False,
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
))

# build list of files to upload
factory.addStep(FileDownload(
    name = "dlsha2rsyncpl",
    mastersrc = scripts_dir + '/sha2rsync.pl',
    workerdest = "../sha2rsync.pl",
    mode = 0o755,
))

# Compares the remote checksum list against the freshly built one and
# writes 'rsynclist' — presumably only new/changed files; see
# scripts/sha2rsync.pl for the exact semantics.
factory.addStep(ShellCommand(
    name = "buildlist",
    description = "Building list of files to upload",
    command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
    haltOnFailure = True,
))

# Wrapper script used by all subsequent upload steps.
factory.addStep(FileDownload(
    name = "dlrsync.sh",
    mastersrc = scripts_dir + '/rsync.sh',
    workerdest = "../rsync.sh",
    mode = 0o755
))
1150
# upload new files and update existing ones
# Only the files named in 'rsynclist' are transferred; --delay-updates plus a
# per-target --partial-dir keeps the remote tree consistent during transfer.
factory.addStep(ShellCommand(
    name = "targetupload",
    description = "Uploading target files",
    command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
             Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
    # NOTE(review): unlike the sibling upload steps this one takes no
    # NetLockUl lock — confirm whether that is intentional.
))

# delete files which don't exist locally
# --existing --ignore-existing together transfer nothing and only apply
# --delete, i.e. this is a pure remote prune pass.
factory.addStep(ShellCommand(
    name = "targetprune",
    description = "Pruning target files",
    command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
             Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
    env={'RSYNC_PASSWORD': rsync_bin_key},
    haltOnFailure = True,
    logEnviron = False,
    locks = NetLockUl,
))

if enable_kmod_archive:
    # Full sync (with --delete) of the per-kernel kmod archive directory.
    factory.addStep(ShellCommand(
        name = "kmodupload",
        description = "Uploading kmod archive",
        command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
                ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
                 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False,
        locks = NetLockUl,
    ))
1188
# Optionally mirror source archives: any regular non-empty file in dl/ that
# is newer than .config (i.e. fetched by this build) and is not a hash or
# partial-download file gets uploaded to the source mirror.
if rsync_src_url is not None:
    factory.addStep(ShellCommand(
        name = "sourcelist",
        description = "Finding source archives to upload",
        command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
        haltOnFailure = True
    ))

    # --size-only: source tarballs are content-addressed by name, so size
    # comparison is sufficient and avoids checksumming large files.
    factory.addStep(ShellCommand(
        name = "sourceupload",
        description = "Uploading source archives",
        command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
                [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
        env={'RSYNC_PASSWORD': rsync_src_key},
        haltOnFailure = True,
        logEnviron = False,
        locks = NetLockUl,
    ))
1207
# Dead code removed: a "packageupload" rsync step was permanently disabled
# here behind `if False:` and could never run. Recover it from VCS history
# if package uploads are ever reinstated.
1220
# logs
# Dead code removed: an "upload" step for build logs was permanently
# disabled here behind `if False:` and could never run. Recover it from
# VCS history if log uploads are ever reinstated.
1235
# Diagnostics trailer: disk usage, space estimate and ccache statistics.
# All three run unconditionally (alwaysRun) and can never fail or flunk the
# build; LC_ALL=C pins the output format for df/du.
factory.addStep(ShellCommand(
    name = "df",
    description = "Reporting disk usage",
    command=["df", "-h", "."],
    env={'LC_ALL': 'C'},
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
    alwaysRun = True
))

factory.addStep(ShellCommand(
    name = "du",
    description = "Reporting estimated file space usage",
    command=["du", "-sh", "."],
    env={'LC_ALL': 'C'},
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
    alwaysRun = True
))

# ccache may live in the build's staging dir, hence the PATH override.
factory.addStep(ShellCommand(
    name = "ccachestat",
    description = "Reporting ccache stats",
    command=["ccache", "-s"],
    env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
    want_stderr = False,
    haltOnFailure = False,
    flunkOnFailure = False,
    warnOnFailure = False,
    alwaysRun = True,
))
1269
# Register the per-target builder and a matching Triggerable scheduler, and
# append a Trigger step to the force-build factory so a forced build can fan
# out to this target (gated by IsTargetSelected).
c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
force_factory.addStep(steps.Trigger(
    name = "trigger_%s" % target,
    description = "Triggering %s build" % target,
    schedulerNames = [ "trigger_%s" % target ],
    set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
    doStepIf = IsTargetSelected(target)
))
1280
1281
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Web UI: enabled only when config.ini provides a bind address/port; optional
# basic auth with a single admin user controlling all endpoints.
if "status_bind" in inip1:
    c['www'] = {
        'port': inip1.get("status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if "status_user" in inip1 and "status_password" in inip1:
        c['www']['auth'] = util.UserPasswordAuth([
            (inip1.get("status_user"), inip1.get("status_password"))
        ])
        # Only the configured status_user (role "admins") may hit control
        # endpoints (force/stop/etc.).
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
        )
1306
# Optional IRC reporter: requires at least host, nickname and channel in the
# [irc] section; password and port (default 6667) are optional.
c['services'] = []
if ini.has_section("irc"):
    iniirc = ini['irc']
    irc_host = iniirc.get("host", None)
    irc_port = iniirc.getint("port", 6667)
    irc_chan = iniirc.get("channel", None)
    irc_nick = iniirc.get("nickname", None)
    irc_pass = iniirc.get("password", None)

    if irc_host and irc_nick and irc_chan:
        # Only announce exceptions, breakages and recoveries — not every build.
        irc = reporters.IRC(irc_host, irc_nick,
            port = irc_port,
            password = irc_pass,
            channels = [ irc_chan ],
            notify_events = [ 'exception', 'problem', 'recovery' ]
        )

        c['services'].append(irc)
1325
# Map openwrt git clone URLs to gitweb commit links in the web UI.
c['revlink'] = util.RevlinkMatch([
    r'https://git.openwrt.org/openwrt/(.*).git'
    ],
    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# Opt out of buildbot's anonymous usage reporting.
c['buildbotNetUsageData'] = None