1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41 from buildbot.worker.local import LocalWorker
42
43
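# Presumably some start-up paths do not leave a twistd.pid behind; write one
# with the current pid so tooling that expects the file keeps working.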
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is the buildmaster config file. It must be installed as
49 # 'master.cfg' in the buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("missing 'general' or 'phase1' section in configuration")
56
57 inip1 = ini["phase1"]
58
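# Illustrative config.ini sketch (values are placeholders) covering the keys
# read in this file; "branch ..." and "worker ..." sections are sketched
# further below next to their parsers:
#
#   [general]
#   workdir = /data/buildbot
#   title = OpenWrt Project
#   title_url = http://openwrt.org/
#
#   [phase1]
#   buildbot_url = http://buildbot.example.org/
#   status_bind = tcp:8010:interface=0.0.0.0
#   port = 9989
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git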
59 # Globals
60 work_dir = os.path.abspath(ini["general"].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini["repo"].get("url")
64
65 rsync_defopts = ["-v", "--timeout=120"]
66
67 # if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
68 # rsync_bin_defopts += ["--contimeout=20"]
69
70 branches = {}
71
72
73 def ini_parse_branch(section):
74 b = {}
75 name = section.get("name")
76
77 if not name:
78 raise ValueError("missing 'name' in " + repr(section))
79 if name in branches:
80 raise ValueError("duplicate branch name in " + repr(section))
81
82 b["name"] = name
83 b["bin_url"] = section.get("binary_url")
84 b["bin_key"] = section.get("binary_password")
85
86 b["src_url"] = section.get("source_url")
87 b["src_key"] = section.get("source_password")
88
89 b["gpg_key"] = section.get("gpg_key")
90
91 b["usign_key"] = section.get("usign_key")
92 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
93 b["usign_comment"] = section.get("usign_comment", usign_comment)
94
95 b["config_seed"] = section.get("config_seed")
96
97 b["kmod_archive"] = section.getboolean("kmod_archive", False)
98
99 branches[name] = b
100 log.msg("Configured branch: {}".format(name))
101
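# Illustrative "branch" section (placeholder values) for ini_parse_branch():
#
#   [branch openwrt-21.02]
#   name = openwrt-21.02
#   binary_url = rsync://user@example.org/bin
#   binary_password = secret
#   source_url = rsync://user@example.org/src
#   source_password = secret
#   usign_key = <base64-encoded usign secret key>
#   gpg_key = <ASCII-armored GPG key>
#   config_seed = <initial .config contents>
#   kmod_archive = yes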
102
103 # PB port can be either a numeric port or a connection string
104 pb_port = inip1.get("port") or 9989
105
106 # This is the dictionary that the buildmaster pays attention to. We also use
107 # a shorter alias to save typing.
108 c = BuildmasterConfig = {}
109
110 ####### PROJECT IDENTITY
111
112 # the 'title' string will appear at the top of this buildbot
113 # installation's web UI home page (linked to the 'titleURL')
114 # and is embedded in the page titles.
115
116 c["title"] = ini["general"].get("title")
117 c["titleURL"] = ini["general"].get("title_url")
118
119 # the 'buildbotURL' string should point to the location where the buildbot's
120 # internal web server (the www UI configured below) is visible. This
121 # typically uses the port number set in 'status_bind', but with an
122 # externally-visible host name which the buildbot cannot figure out
123 # without some help.
124
125 c["buildbotURL"] = inip1.get("buildbot_url")
126
127 ####### BUILDWORKERS
128
129 # The 'workers' list defines the set of recognized buildworkers. Each element is
130 # a Worker object, specifying a unique worker name and password. The same
131 # worker name and password must be configured on the worker.
132
133 c["workers"] = []
134 NetLocks = dict()
135
136
137 def ini_parse_workers(section):
138 name = section.get("name")
139 password = section.get("password")
140 phase = section.getint("phase")
141 tagonly = section.getboolean("tag_only")
142 rsyncipv4 = section.getboolean("rsync_ipv4")
143
144 if not name or not password or phase != 1:
145 log.msg("invalid worker configuration ignored: {}".format(repr(section)))
146 return
147
148 sl_props = {"tag_only": tagonly}
149 if "dl_lock" in section:
150 lockname = section.get("dl_lock")
151 sl_props["dl_lock"] = lockname
152 if lockname not in NetLocks:
153 NetLocks[lockname] = locks.MasterLock(lockname)
154 if "ul_lock" in section:
155 lockname = section.get("ul_lock")
156 sl_props["ul_lock"] = lockname
157 if lockname not in NetLocks:
158 NetLocks[lockname] = locks.MasterLock(lockname)
159 if rsyncipv4:
160 sl_props[
161 "rsync_ipv4"
162 ] = True # only set prop if required, we use '+' Interpolate substitution
163
164 log.msg("Configured worker: {}".format(name))
165 # NB: phase1 build factory requires workers to be single-build only
166 c["workers"].append(Worker(name, password, max_builds=1, properties=sl_props))
167
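# Illustrative "worker" section (placeholder values) for ini_parse_workers():
#
#   [worker 1]
#   name = builder-01
#   password = secret
#   phase = 1
#   tag_only = no
#   rsync_ipv4 = no
#   # optional shared network locks:
#   dl_lock = dl_public
#   ul_lock = ul_public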
168
169 for section in ini.sections():
170 if section.startswith("branch "):
171 ini_parse_branch(ini[section])
172
173 if section.startswith("worker "):
174 ini_parse_workers(ini[section])
175
176 # list of branches in build-priority order
177 branchNames = [branches[b]["name"] for b in branches]
178
179 c["protocols"] = {"pb": {"port": pb_port}}
180
181 # coalesce builds
182 c["collapseRequests"] = True
183
184 # Reduce amount of backlog data
185 c["configurators"] = [
186 util.JanitorConfigurator(
187 logHorizon=timedelta(days=3),
188 hour=6,
189 )
190 ]
191
192
193 @defer.inlineCallbacks
194 def getNewestCompleteTime(bldr):
195 """Returns the complete_at of the latest completed and not SKIPPED
196 build request for this builder, or None if there is no such build
197 request. We need to filter out SKIPPED requests because we are
198 using collapseRequests=True, which unfortunately marks all
199 previous requests as complete when a new buildset is created.
200
201 @returns: datetime instance or None, via Deferred
202 """
203
204 bldrid = yield bldr.getBuilderId()
205 completed = yield bldr.master.data.get(
206 ("builders", bldrid, "buildrequests"),
207 [
208 resultspec.Filter("complete", "eq", [True]),
209 resultspec.Filter("results", "ne", [results.SKIPPED]),
210 ],
211 order=["-complete_at"],
212 limit=1,
213 )
214 if not completed:
215 return
216
217 complete_at = completed[0]["complete_at"]
218
219 last_build = yield bldr.master.data.get(
220 ("builds",),
221 [
222 resultspec.Filter("builderid", "eq", [bldrid]),
223 ],
224 order=["-started_at"],
225 limit=1,
226 )
227
228 if last_build and last_build[0]:
229 last_complete_at = last_build[0]["complete_at"]
230 if last_complete_at and (last_complete_at > complete_at):
231 return last_complete_at
232
233 return complete_at
234
235
236 @defer.inlineCallbacks
237 def prioritizeBuilders(master, builders):
238 """Returns the list of builders sorted by the timestamp of their last
239 completed and not skipped build, ordered first by branch-name priority.
240
241 @returns: list of sorted builders
242 """
243
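# Sort key per builder is (branch priority, last completion time, name);
# builders that never completed a build sort first within their priority
# group, builders that are currently building sort last.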
244 bldrNamePrio = {"__Janitor": 0, "00_force_build": 0}
245 i = 1
246 for bname in branchNames:
247 bldrNamePrio[bname] = i
248 i += 1
249
250 def is_building(bldr):
251 return bool(bldr.building) or bool(bldr.old_building)
252
253 def bldr_info(bldr):
254 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
255 d.addCallback(lambda complete_at: (complete_at, bldr))
256 return d
257
258 def bldr_sort(item):
259 (complete_at, bldr) = item
260
261 pos = 99
262 for name, prio in bldrNamePrio.items():
263 if bldr.name.startswith(name):
264 pos = prio
265 break
266
267 if not complete_at:
268 date = datetime.min
269 complete_at = date.replace(tzinfo=tzutc())
270
271 if is_building(bldr):
272 date = datetime.max
273 complete_at = date.replace(tzinfo=tzutc())
274
275 return (pos, complete_at, bldr.name)
276
277 infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
278 infos.sort(key=bldr_sort)
279
280 # for r in infos:
281 # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
282
283 return [r[1] for r in infos]
284
285
286 c["prioritizeBuilders"] = prioritizeBuilders
287
288 ####### CHANGESOURCES
289
290 # find targets
291 targets = dict()
292
293
294 def populateTargets():
295 """Fetch a shallow clone of each configured branch in turn,
296 execute dump-target-info.pl and collate the results to ensure
297 that targets which only exist in specific branches still get built.
298 This takes a while during master startup but is executed only once.
299 """
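# dump-target-info.pl is assumed to print one whitespace-separated record per
# line whose first field is the "<target>/<subtarget>" name (e.g. "ath79/generic");
# only that first field is collected below.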
300 sourcegit = work_dir + "/source.git"
301 for branch in branchNames:
302 log.msg(f"Populating targets for {branch}, this will take time")
303
304 if os.path.isdir(sourcegit):
305 subprocess.call(["rm", "-rf", sourcegit])
306
307 subprocess.call(
308 [
309 "git",
310 "clone",
311 "-q",
312 "--depth=1",
313 "--branch=" + branch,
314 repo_url,
315 sourcegit,
316 ]
317 )
318
319 os.makedirs(sourcegit + "/tmp", exist_ok=True)
320 findtargets = subprocess.Popen(
321 ["./scripts/dump-target-info.pl", "targets"],
322 stdout=subprocess.PIPE,
323 stderr=subprocess.DEVNULL,
324 cwd=sourcegit,
325 )
326
327 targets[branch] = set()
328 while True:
329 line = findtargets.stdout.readline()
330 if not line:
331 break
332 ta = line.decode().strip().split(" ")
333 targets[branch].add(ta[0])
334
335 subprocess.call(["rm", "-rf", sourcegit])
336
337
338 populateTargets()
339
340 # the 'change_source' setting tells the buildmaster how it should find out
341 # about source code changes.
342
343 c["change_source"] = []
344 c["change_source"].append(
345 GitPoller(
346 repo_url,
347 workdir=work_dir + "/work.git",
348 branches=branchNames,
349 pollAtLaunch=True,
350 pollinterval=300,
351 )
352 )
353
354 ####### SCHEDULERS
355
356 # Configure the Schedulers, which decide how to react to incoming changes.
357
358
359 # Selector for known valid tags
360 class TagChoiceParameter(BaseParameter):
361 spec_attributes = ["strict", "choices"]
362 type = "list"
363 strict = True
364
365 def __init__(self, name, label=None, **kw):
366 super().__init__(name, label, **kw)
367 self._choice_list = []
368
369 def getRevTags(self, findtag=None):
370 taglist = []
371 branchvers = []
372
373 # we will filter out tags that do not match the configured branches
374 for b in branchNames:
375 basever = re.search(r"-([0-9]+\.[0-9]+)$", b)
376 if basever:
377 branchvers.append(basever[1])
378
379 # grab tags from remote repository
380 alltags = subprocess.Popen(
381 ["git", "ls-remote", "--tags", repo_url], stdout=subprocess.PIPE
382 )
383
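# each 'ls-remote --tags' line looks like "<sha1>\trefs/tags/<tagname>";
# peeled "^{}" entries do not match the anchored tag regex below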
384 while True:
385 line = alltags.stdout.readline()
386
387 if not line:
388 break
389
390 (rev, tag) = line.split()
391
392 # does it match known format? ('vNN.NN.NN(-rcN)')
393 tagver = re.search(
394 r"\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$",
395 tag.decode().strip(),
396 )
397
398 # only list valid tags matching configured branches
399 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
400 # if we want a specific tag, ignore all that don't match
401 if findtag and findtag != tagver[1]:
402 continue
403 taglist.append({"rev": rev.decode().strip(), "tag": tagver[1]})
404
405 return taglist
406
407 @property
408 def choices(self):
409 taglist = [rt["tag"] for rt in self.getRevTags()]
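# for sorting purposes only, non-rc tags get a "-z" suffix so that with
# reverse=True a final release sorts ahead of its release candidates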
410 taglist.sort(
411 reverse=True,
412 key=lambda tag: tag if re.search(r"-rc[0-9]+$", tag) else tag + "-z",
413 )
414 taglist.insert(0, "")
415
416 self._choice_list = taglist
417
418 return self._choice_list
419
420 def updateFromKwargs(self, properties, kwargs, **unused):
421 tag = self.getFromKwargs(kwargs)
422 properties[self.name] = tag
423
424 # find the commit matching the tag
425 findtag = self.getRevTags(tag)
426
427 if not findtag:
428 raise ValidationError("Couldn't find tag")
429
430 properties["force_revision"] = findtag[0]["rev"]
431
432 # find the branch matching the tag
433 branch = None
434 branchver = re.search(r"v([0-9]+\.[0-9]+)", tag)
435 for b in branchNames:
436 if b.endswith(branchver[1]):
437 branch = b
438
439 if not branch:
440 raise ValidationError("Couldn't find branch")
441
442 properties["force_branch"] = branch
443
444 def parse_from_arg(self, s):
445 if self.strict and s not in self._choice_list:
446 raise ValidationError(
447 "'%s' does not belong to list of available choices '%s'"
448 % (s, self._choice_list)
449 )
450 return s
451
452
453 @util.renderer
454 @defer.inlineCallbacks
455 def builderNames(props):
456 """Since we have per-branch and per-target builders,
457 address the relevant builders for each new buildrequest
458 based on the request's desired branch and target.
459 """
460 branch = props.getProperty("branch")
461 target = props.getProperty("target", "")
462
463 if target == "all":
464 target = ""
465
466 # if that didn't work, try sourcestamp to find a branch
467 if not branch:
468 # match builders with target branch
469 ss = props.sourcestamps[0]
470 if ss:
471 branch = ss["branch"]
472 else:
473 log.msg("couldn't find builder")
474 return [] # nothing works
475
476 bname = branch + "_" + target
477 builders = []
478
479 for b in (yield props.master.data.get(("builders",))):
480 if not b["name"].startswith(bname):
481 continue
482 builders.append(b["name"])
483
484 return builders
485
486
487 c["schedulers"] = []
488 c["schedulers"].append(
489 AnyBranchScheduler(
490 name="all",
491 change_filter=util.ChangeFilter(branch=branchNames),
492 treeStableTimer=15 * 60,
493 builderNames=builderNames,
494 )
495 )
496
497 c["schedulers"].append(
498 ForceScheduler(
499 name="force",
500 buttonName="Force builds",
501 label="Force build details",
502 builderNames=["00_force_build"],
503 codebases=[
504 util.CodebaseParameter(
505 "",
506 label="Repository",
507 branch=util.FixedParameter(name="branch", default=""),
508 revision=util.FixedParameter(name="revision", default=""),
509 repository=util.FixedParameter(name="repository", default=""),
510 project=util.FixedParameter(name="project", default=""),
511 )
512 ],
513 reason=util.StringParameter(
514 name="reason",
515 label="Reason",
516 default="Trigger build",
517 required=True,
518 size=80,
519 ),
520 properties=[
521 # NB: avoid nesting to simplify processing of properties
522 util.ChoiceStringParameter(
523 name="target",
524 label="Build target",
525 default="all",
526 choices=["all"] + [t for b in branchNames for t in targets[b]],
527 ),
528 TagChoiceParameter(name="tag", label="Build tag", default=""),
529 ],
530 )
531 )
532
533 c["schedulers"].append(
534 schedulers.Triggerable(name="trigger", builderNames=builderNames)
535 )
536
537 ####### BUILDERS
538
539 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
540 # what steps, and which workers can execute them. Note that any particular build will
541 # only take place on one worker.
542
543
544 def IsNoMasterBuild(step):
545 return step.getProperty("branch") != "master"
546
547
548 def IsUsignEnabled(step):
549 branch = step.getProperty("branch")
550 return branch and branches[branch].get("usign_key")
551
552
553 def IsSignEnabled(step):
554 branch = step.getProperty("branch")
555 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
556
557
558 def IsKmodArchiveEnabled(step):
559 branch = step.getProperty("branch")
560 return branch and branches[branch].get("kmod_archive")
561
562
563 def IsKmodArchiveAndRsyncEnabled(step):
564 branch = step.getProperty("branch")
565 return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url"))
566
567
568 def GetBaseVersion(branch):
569 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
570 return branch.split("-")[1]
571 else:
572 return "master"
573
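# e.g. GetBaseVersion("openwrt-21.02") -> "21.02", GetBaseVersion("master") -> "master"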
574
575 @properties.renderer
576 def GetVersionPrefix(props):
577 branch = props.getProperty("branch")
578 basever = GetBaseVersion(branch)
579 if props.hasProperty("tag") and re.match(
580 r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]
581 ):
582 return "%s/" % props["tag"][1:]
583 elif basever != "master":
584 return "%s-SNAPSHOT/" % basever
585 else:
586 return ""
587
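# e.g. tag "v21.02.3" -> "21.02.3/", branch "openwrt-21.02" without a tag
# -> "21.02-SNAPSHOT/", "master" -> ""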
588
589 @util.renderer
590 def GetConfigSeed(props):
591 branch = props.getProperty("branch")
592 return branch and branches[branch].get("config_seed") or ""
593
594
595 @util.renderer
596 def GetRsyncParams(props, srcorbin, urlorkey):
597 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
598 branch = props.getProperty("branch")
599 opt = srcorbin + "_" + urlorkey
600 return branch and branches[branch].get(opt)
601
602
603 @util.renderer
604 def GetUsignKey(props):
605 branch = props.getProperty("branch")
606 return branch and branches[branch].get("usign_key")
607
608
609 def GetNextBuild(builder, requests):
610 for r in requests:
611 if r.properties:
612 # schedule tagged builds first
613 if r.properties.hasProperty("tag"):
614 return r
615
616 r = requests[0]
617 # log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
618 return r
619
620
621 def MakeEnv(overrides=None, tryccache=False):
622 env = {
623 "CCC": Interpolate("%(prop:cc_command:-gcc)s"),
624 "CCXX": Interpolate("%(prop:cxx_command:-g++)s"),
625 }
626 if tryccache:
627 env["CC"] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
628 env["CXX"] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
629 env["CCACHE"] = Interpolate("%(prop:ccache_command:-)s")
630 else:
631 env["CC"] = env["CCC"]
632 env["CXX"] = env["CCXX"]
633 env["CCACHE"] = ""
634 if overrides is not None:
635 env.update(overrides)
636 return env
637
638
639 @properties.renderer
640 def NetLockDl(props, extralock=None):
641 lock = None
642 if props.hasProperty("dl_lock"):
643 lock = NetLocks[props["dl_lock"]]
644 if lock is not None:
645 return [lock.access("exclusive")]
646 else:
647 return []
648
649
650 @properties.renderer
651 def NetLockUl(props):
652 lock = None
653 if props.hasProperty("ul_lock"):
654 lock = NetLocks[props["ul_lock"]]
655 if lock is not None:
656 return [lock.access("exclusive")]
657 else:
658 return []
659
660
661 def IsTargetSelected(target):
662 def CheckTargetProperty(step):
663 selected_target = step.getProperty("target", "all")
664 if selected_target != "all" and selected_target != target:
665 return False
666 return True
667
668 return CheckTargetProperty
669
670
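# Derive the usign public key from the configured secret key. Assumed
# signify/usign layout: the slices below keep the two-byte algorithm tag,
# the eight-byte key id and the trailing 32-byte public half of the
# Ed25519 key pair.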
671 @util.renderer
672 def UsignSec2Pub(props):
673 branch = props.getProperty("branch")
674 try:
675 comment = (
676 branches[branch].get("usign_comment") or "untrusted comment: secret key"
677 )
678 seckey = branches[branch].get("usign_key")
679 seckey = base64.b64decode(seckey)
680 except Exception:
681 return None
682
683 return "{}\n{}".format(
684 re.sub(r"\bsecret key$", "public key", comment),
685 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode(),
686 )
687
688
689 def canStartBuild(builder, wfb, request):
690 """Filter out non-tag build requests for tag_only workers."""
691 wtagonly = wfb.worker.properties.getProperty("tag_only")
692 tag = request.properties.getProperty("tag")
693
694 if wtagonly and not tag:
695 return False
696
697 return True
698
699
700 c["builders"] = []
701
702 workerNames = []
703
704 for worker in c["workers"]:
705 workerNames.append(worker.workername)
706
707 # add a single LocalWorker to handle the forcebuild builder
708 c["workers"].append(LocalWorker("__local_force_build", max_builds=1))
709
710 force_factory = BuildFactory()
711 force_factory.addStep(
712 steps.Trigger(
713 name="trigger_build",
714 schedulerNames=["trigger"],
715 sourceStamps=[
716 {
717 "codebase": "",
718 "branch": Property("force_branch"),
719 "revision": Property("force_revision"),
720 "repository": repo_url,
721 "project": "",
722 }
723 ],
724 set_properties={
725 "reason": Property("reason"),
726 "tag": Property("tag"),
727 "target": Property("target"),
728 },
729 )
730 )
731
732 c["builders"].append(
733 BuilderConfig(
734 name="00_force_build", workername="__local_force_build", factory=force_factory
735 )
736 )
737
738
739 # NB the phase1 build factory assumes workers are single-build only
740 def prepareFactory(target):
741 (target, subtarget) = target.split("/")
742
743 factory = BuildFactory()
744
745 # setup shared work directory if required
746 factory.addStep(
747 ShellCommand(
748 name="sharedwd",
749 descriptionDone="Shared work directory set up",
750 command='test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
751 workdir=".",
752 haltOnFailure=True,
753 )
754 )
755
756 # find number of cores
757 factory.addStep(
758 SetPropertyFromCommand(
759 name="nproc",
760 property="nproc",
761 description="Finding number of CPUs",
762 command=["nproc"],
763 )
764 )
765
766 # find gcc and g++ compilers
767 factory.addStep(
768 FileDownload(
769 name="dlfindbinpl",
770 mastersrc=scripts_dir + "/findbin.pl",
771 workerdest="../findbin.pl",
772 mode=0o755,
773 )
774 )
775
776 factory.addStep(
777 SetPropertyFromCommand(
778 name="gcc",
779 property="cc_command",
780 description="Finding gcc command",
781 command=["../findbin.pl", "gcc", "", ""],
782 haltOnFailure=True,
783 )
784 )
785
786 factory.addStep(
787 SetPropertyFromCommand(
788 name="g++",
789 property="cxx_command",
790 description="Finding g++ command",
791 command=["../findbin.pl", "g++", "", ""],
792 haltOnFailure=True,
793 )
794 )
795
796 # see if ccache is available
797 factory.addStep(
798 SetPropertyFromCommand(
799 name="ccache",
800 property="ccache_command",
801 description="Testing for ccache command",
802 command=["which", "ccache"],
803 haltOnFailure=False,
804 flunkOnFailure=False,
805 warnOnFailure=False,
806 hideStepIf=lambda r, s: r == results.FAILURE,
807 )
808 )
809
810 # check out the source
811 # Git() runs:
812 # if repo doesn't exist: 'git clone repourl'
813 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
814 # git cat-file -e <commit>
815 # git checkout -f <commit>
816 # git checkout -B <branch>
817 # git rev-parse HEAD
818 factory.addStep(
819 Git(
820 name="git",
821 repourl=repo_url,
822 mode="full",
823 method="fresh",
824 locks=NetLockDl,
825 haltOnFailure=True,
826 )
827 )
828
829 # workaround for https://github.com/openwrt/buildbot/issues/5
830 factory.addStep(
831 Git(
832 name="git me once more please",
833 repourl=repo_url,
834 mode="full",
835 method="fresh",
836 locks=NetLockDl,
837 haltOnFailure=True,
838 )
839 )
840
841 # update remote refs
842 factory.addStep(
843 ShellCommand(
844 name="fetchrefs",
845 description="Fetching Git remote refs",
846 descriptionDone="Git remote refs fetched",
847 command=[
848 "git",
849 "fetch",
850 "origin",
851 Interpolate(
852 "+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s"
853 ),
854 ],
855 haltOnFailure=True,
856 )
857 )
858
859 # getver.sh requires local branches to track upstream, otherwise version computation fails.
860 # Git() does not set tracking branches when cloning or switching, so work around this here.
861 factory.addStep(
862 ShellCommand(
863 name="trackupstream",
864 description="Setting upstream branch",
865 descriptionDone="getver.sh is happy now",
866 command=["git", "branch", "-u", Interpolate("origin/%(prop:branch)s")],
867 haltOnFailure=True,
868 )
869 )
870
871 # Verify that Git HEAD points to a tag or branch
872 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
873 factory.addStep(
874 ShellCommand(
875 name="gitverify",
876 description="Ensuring that Git HEAD is pointing to a branch or tag",
877 descriptionDone="Git HEAD is sane",
878 command='git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
879 haltOnFailure=True,
880 )
881 )
882
883 factory.addStep(
884 StringDownload(
885 name="ccachecc",
886 s='#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
887 workerdest="../ccache_cc.sh",
888 mode=0o755,
889 )
890 )
891
892 factory.addStep(
893 StringDownload(
894 name="ccachecxx",
895 s='#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
896 workerdest="../ccache_cxx.sh",
897 mode=0o755,
898 )
899 )
900
901 # update feeds
902 factory.addStep(
903 ShellCommand(
904 name="updatefeeds",
905 description="Updating feeds",
906 command=["./scripts/feeds", "update"],
907 env=MakeEnv(tryccache=True),
908 haltOnFailure=True,
909 locks=NetLockDl,
910 )
911 )
912
913 # install feeds
914 factory.addStep(
915 ShellCommand(
916 name="installfeeds",
917 description="Installing feeds",
918 command=["./scripts/feeds", "install", "-a"],
919 env=MakeEnv(tryccache=True),
920 haltOnFailure=True,
921 )
922 )
923
924 # seed config
925 factory.addStep(
926 StringDownload(
927 name="dlconfigseed",
928 s=Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
929 workerdest=".config",
930 mode=0o644,
931 )
932 )
933
934 # configure
935 factory.addStep(
936 ShellCommand(
937 name="newconfig",
938 descriptionDone=".config seeded",
939 command=Interpolate(
940 "printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config",
941 target=target,
942 subtarget=subtarget,
943 usign=GetUsignKey,
944 ),
945 )
946 )
947
948 factory.addStep(
949 ShellCommand(
950 name="defconfig",
951 description="Populating .config",
952 command=["make", "defconfig"],
953 env=MakeEnv(),
954 )
955 )
956
957 # check arch - exit early if the target does not exist - NB: some targets do not define CONFIG_TARGET_<target>_<subtarget>
958 factory.addStep(
959 ShellCommand(
960 name="checkarch",
961 description="Checking architecture",
962 descriptionDone="Architecture validated",
963 command='grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config'
964 % (target, subtarget),
965 logEnviron=False,
966 want_stdout=False,
967 want_stderr=False,
968 haltOnFailure=True,
969 flunkOnFailure=False, # this is not a build FAILURE - TODO mark build as SKIPPED
970 )
971 )
972
973 # find libc suffix
974 factory.addStep(
975 SetPropertyFromCommand(
976 name="libc",
977 property="libc",
978 description="Finding libc suffix",
979 command=[
980 "sed",
981 "-ne",
982 '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }',
983 ".config",
984 ],
985 )
986 )
987
988 # install build key
989 factory.addStep(
990 StringDownload(
991 name="dlkeybuildpub",
992 s=Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
993 workerdest="key-build.pub",
994 mode=0o600,
995 doStepIf=IsUsignEnabled,
996 )
997 )
998
999 factory.addStep(
1000 StringDownload(
1001 name="dlkeybuild",
1002 s="# fake private key",
1003 workerdest="key-build",
1004 mode=0o600,
1005 doStepIf=IsUsignEnabled,
1006 )
1007 )
1008
1009 factory.addStep(
1010 StringDownload(
1011 name="dlkeybuilducert",
1012 s="# fake certificate",
1013 workerdest="key-build.ucert",
1014 mode=0o600,
1015 doStepIf=IsUsignEnabled,
1016 )
1017 )
1018
1019 # prepare dl
1020 factory.addStep(
1021 ShellCommand(
1022 name="dldir",
1023 description="Preparing dl/",
1024 descriptionDone="dl/ prepared",
1025 command='mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
1026 workdir=Property("builddir"),
1027 logEnviron=False,
1028 want_stdout=False,
1029 )
1030 )
1031
1032 # cleanup dl
1033 factory.addStep(
1034 ShellCommand(
1035 name="dlprune",
1036 description="Pruning dl/",
1037 descriptionDone="dl/ pruned",
1038 command="find dl/ -mindepth 1 -atime +15 -delete -print",
1039 logEnviron=False,
1040 haltOnFailure=False,
1041 flunkOnFailure=False,
1042 warnOnFailure=False,
1043 )
1044 )
1045
1046 # prepare tar
1047 factory.addStep(
1048 ShellCommand(
1049 name="dltar",
1050 description="Building and installing GNU tar",
1051 descriptionDone="GNU tar built and installed",
1052 command=[
1053 "make",
1054 Interpolate("-j%(prop:nproc:-1)s"),
1055 "tools/tar/compile",
1056 "V=s",
1057 ],
1058 env=MakeEnv(tryccache=True),
1059 haltOnFailure=True,
1060 )
1061 )
1062
1063 # populate dl
1064 factory.addStep(
1065 ShellCommand(
1066 name="dlrun",
1067 description="Populating dl/",
1068 descriptionDone="dl/ populated",
1069 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
1070 env=MakeEnv(),
1071 logEnviron=False,
1072 locks=NetLockDl,
1073 )
1074 )
1075
1076 factory.addStep(
1077 ShellCommand(
1078 name="cleanbase",
1079 description="Cleaning base-files",
1080 command=["make", "package/base-files/clean", "V=s"],
1081 )
1082 )
1083
1084 # build
1085 factory.addStep(
1086 ShellCommand(
1087 name="tools",
1088 description="Building and installing tools",
1089 descriptionDone="Tools built and installed",
1090 command=[
1091 "make",
1092 Interpolate("-j%(prop:nproc:-1)s"),
1093 "tools/install",
1094 "V=s",
1095 ],
1096 env=MakeEnv(tryccache=True),
1097 haltOnFailure=True,
1098 )
1099 )
1100
1101 factory.addStep(
1102 ShellCommand(
1103 name="toolchain",
1104 description="Building and installing toolchain",
1105 descriptionDone="Toolchain built and installed",
1106 command=[
1107 "make",
1108 Interpolate("-j%(prop:nproc:-1)s"),
1109 "toolchain/install",
1110 "V=s",
1111 ],
1112 env=MakeEnv(),
1113 haltOnFailure=True,
1114 )
1115 )
1116
1117 factory.addStep(
1118 ShellCommand(
1119 name="kmods",
1120 description="Building kmods",
1121 descriptionDone="Kmods built",
1122 command=[
1123 "make",
1124 Interpolate("-j%(prop:nproc:-1)s"),
1125 "target/compile",
1126 "V=s",
1127 "IGNORE_ERRORS=n m",
1128 "BUILD_LOG=1",
1129 ],
1130 env=MakeEnv(),
1131 haltOnFailure=True,
1132 )
1133 )
1134
1135 # find kernel version
1136 factory.addStep(
1137 SetPropertyFromCommand(
1138 name="kernelversion",
1139 property="kernelversion",
1140 description="Finding the effective Kernel version",
1141 command="make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1142 env={"TOPDIR": Interpolate("%(prop:builddir)s/build")},
1143 )
1144 )
1145
1146 factory.addStep(
1147 ShellCommand(
1148 name="pkgclean",
1149 description="Cleaning up package build",
1150 descriptionDone="Package build cleaned up",
1151 command=["make", "package/cleanup", "V=s"],
1152 )
1153 )
1154
1155 factory.addStep(
1156 ShellCommand(
1157 name="pkgbuild",
1158 description="Building packages",
1159 descriptionDone="Packages built",
1160 command=[
1161 "make",
1162 Interpolate("-j%(prop:nproc:-1)s"),
1163 "package/compile",
1164 "V=s",
1165 "IGNORE_ERRORS=n m",
1166 "BUILD_LOG=1",
1167 ],
1168 env=MakeEnv(),
1169 haltOnFailure=True,
1170 )
1171 )
1172
1173 factory.addStep(
1174 ShellCommand(
1175 name="pkginstall",
1176 description="Installing packages",
1177 descriptionDone="Packages installed",
1178 command=[
1179 "make",
1180 Interpolate("-j%(prop:nproc:-1)s"),
1181 "package/install",
1182 "V=s",
1183 ],
1184 env=MakeEnv(),
1185 haltOnFailure=True,
1186 )
1187 )
1188
1189 factory.addStep(
1190 ShellCommand(
1191 name="pkgindex",
1192 description="Indexing packages",
1193 descriptionDone="Packages indexed",
1194 command=[
1195 "make",
1196 Interpolate("-j%(prop:nproc:-1)s"),
1197 "package/index",
1198 "V=s",
1199 "CONFIG_SIGNED_PACKAGES=",
1200 ],
1201 env=MakeEnv(),
1202 haltOnFailure=True,
1203 )
1204 )
1205
1206 factory.addStep(
1207 ShellCommand(
1208 name="images",
1209 description="Building and installing images",
1210 descriptionDone="Images built and installed",
1211 command=[
1212 "make",
1213 Interpolate("-j%(prop:nproc:-1)s"),
1214 "target/install",
1215 "V=s",
1216 ],
1217 env=MakeEnv(),
1218 haltOnFailure=True,
1219 )
1220 )
1221
1222 factory.addStep(
1223 ShellCommand(
1224 name="buildinfo",
1225 description="Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1226 command="make -j1 buildinfo V=s || true",
1227 env=MakeEnv(),
1228 haltOnFailure=True,
1229 )
1230 )
1231
1232 factory.addStep(
1233 ShellCommand(
1234 name="json_overview_image_info",
1235 description="Generating profiles.json in target folder",
1236 command="make -j1 json_overview_image_info V=s || true",
1237 env=MakeEnv(),
1238 haltOnFailure=True,
1239 )
1240 )
1241
1242 factory.addStep(
1243 ShellCommand(
1244 name="checksums",
1245 description="Calculating checksums",
1246 descriptionDone="Checksums calculated",
1247 command=["make", "-j1", "checksum", "V=s"],
1248 env=MakeEnv(),
1249 haltOnFailure=True,
1250 )
1251 )
1252
1253 factory.addStep(
1254 ShellCommand(
1255 name="kmoddir",
1256 descriptionDone="Kmod directory created",
1257 command=[
1258 "mkdir",
1259 "-p",
1260 Interpolate(
1261 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s",
1262 target=target,
1263 subtarget=subtarget,
1264 ),
1265 ],
1266 haltOnFailure=True,
1267 doStepIf=IsKmodArchiveEnabled,
1268 )
1269 )
1270
1271 factory.addStep(
1272 ShellCommand(
1273 name="kmodprepare",
1274 description="Preparing kmod archive",
1275 descriptionDone="Kmod archive prepared",
1276 command=[
1277 "rsync",
1278 "--include=/kmod-*.ipk",
1279 "--exclude=*",
1280 "-va",
1281 Interpolate(
1282 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/",
1283 target=target,
1284 subtarget=subtarget,
1285 ),
1286 Interpolate(
1287 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1288 target=target,
1289 subtarget=subtarget,
1290 ),
1291 ],
1292 haltOnFailure=True,
1293 doStepIf=IsKmodArchiveEnabled,
1294 )
1295 )
1296
1297 factory.addStep(
1298 ShellCommand(
1299 name="kmodindex",
1300 description="Indexing kmod archive",
1301 descriptionDone="Kmod archive indexed",
1302 command=[
1303 "make",
1304 Interpolate("-j%(prop:nproc:-1)s"),
1305 "package/index",
1306 "V=s",
1307 "CONFIG_SIGNED_PACKAGES=",
1308 Interpolate(
1309 "PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1310 target=target,
1311 subtarget=subtarget,
1312 ),
1313 ],
1314 env=MakeEnv(),
1315 haltOnFailure=True,
1316 doStepIf=IsKmodArchiveEnabled,
1317 )
1318 )
1319
1320 # sign
1321 factory.addStep(
1322 MasterShellCommand(
1323 name="signprepare",
1324 descriptionDone="Temporary signing directory prepared",
1325 command=["mkdir", "-p", "%s/signing" % (work_dir)],
1326 haltOnFailure=True,
1327 doStepIf=IsSignEnabled,
1328 )
1329 )
1330
1331 factory.addStep(
1332 ShellCommand(
1333 name="signpack",
1334 description="Packing files to sign",
1335 descriptionDone="Files to sign packed",
1336 command=Interpolate(
1337 "find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz",
1338 target=target,
1339 subtarget=subtarget,
1340 ),
1341 haltOnFailure=True,
1342 doStepIf=IsSignEnabled,
1343 )
1344 )
1345
1346 factory.addStep(
1347 FileUpload(
1348 workersrc="sign.tar.gz",
1349 masterdest="%s/signing/%s.%s.tar.gz" % (work_dir, target, subtarget),
1350 haltOnFailure=True,
1351 doStepIf=IsSignEnabled,
1352 )
1353 )
1354
1355 factory.addStep(
1356 MasterShellCommand(
1357 name="signfiles",
1358 description="Signing files",
1359 descriptionDone="Files signed",
1360 command=[
1361 "%s/signall.sh" % (scripts_dir),
1362 "%s/signing/%s.%s.tar.gz" % (work_dir, target, subtarget),
1363 Interpolate("%(prop:branch)s"),
1364 ],
1365 env={"CONFIG_INI": os.getenv("BUILDMASTER_CONFIG", "./config.ini")},
1366 haltOnFailure=True,
1367 doStepIf=IsSignEnabled,
1368 )
1369 )
1370
1371 factory.addStep(
1372 FileDownload(
1373 name="dlsigntargz",
1374 mastersrc="%s/signing/%s.%s.tar.gz" % (work_dir, target, subtarget),
1375 workerdest="sign.tar.gz",
1376 haltOnFailure=True,
1377 doStepIf=IsSignEnabled,
1378 )
1379 )
1380
1381 factory.addStep(
1382 ShellCommand(
1383 name="signunpack",
1384 description="Unpacking signed files",
1385 descriptionDone="Signed files unpacked",
1386 command=["tar", "-xzf", "sign.tar.gz"],
1387 haltOnFailure=True,
1388 doStepIf=IsSignEnabled,
1389 )
1390 )
1391
1392 # upload
1393 factory.addStep(
1394 ShellCommand(
1395 name="dirprepare",
1396 descriptionDone="Upload directory structure prepared",
1397 command=[
1398 "mkdir",
1399 "-p",
1400 Interpolate(
1401 "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s",
1402 target=target,
1403 subtarget=subtarget,
1404 prefix=GetVersionPrefix,
1405 ),
1406 ],
1407 haltOnFailure=True,
1408 )
1409 )
1410
1411 factory.addStep(
1412 ShellCommand(
1413 name="linkprepare",
1414 descriptionDone="Repository symlink prepared",
1415 command=[
1416 "ln",
1417 "-s",
1418 "-f",
1419 Interpolate(
1420 "../packages-%(kw:basever)s",
1421 basever=util.Transform(GetBaseVersion, Property("branch")),
1422 ),
1423 Interpolate(
1424 "tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix
1425 ),
1426 ],
1427 doStepIf=IsNoMasterBuild,
1428 haltOnFailure=True,
1429 )
1430 )
1431
1432 factory.addStep(
1433 ShellCommand(
1434 name="kmoddirprepare",
1435 descriptionDone="Kmod archive upload directory prepared",
1436 command=[
1437 "mkdir",
1438 "-p",
1439 Interpolate(
1440 "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s",
1441 target=target,
1442 subtarget=subtarget,
1443 prefix=GetVersionPrefix,
1444 ),
1445 ],
1446 haltOnFailure=True,
1447 doStepIf=IsKmodArchiveEnabled,
1448 )
1449 )
1450
1451 factory.addStep(
1452 ShellCommand(
1453 name="dirupload",
1454 description="Uploading directory structure",
1455 descriptionDone="Directory structure uploaded",
1456 command=["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")]
1457 + rsync_defopts
1458 + [
1459 "tmp/upload/",
1460 Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url")),
1461 ],
1462 env={
1463 "RSYNC_PASSWORD": Interpolate(
1464 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1465 )
1466 },
1467 haltOnFailure=True,
1468 logEnviron=False,
1469 locks=NetLockUl,
1470 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1471 )
1472 )
1473
1474 # download remote sha256sums to 'target-sha256sums'
1475 factory.addStep(
1476 ShellCommand(
1477 name="target-sha256sums",
1478 description="Fetching remote sha256sums for target",
1479 descriptionDone="Remote sha256sums for target fetched",
1480 command=["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")]
1481 + rsync_defopts
1482 + [
1483 Interpolate(
1484 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums",
1485 url=GetRsyncParams.withArgs("bin", "url"),
1486 target=target,
1487 subtarget=subtarget,
1488 prefix=GetVersionPrefix,
1489 ),
1490 "target-sha256sums",
1491 ],
1492 env={
1493 "RSYNC_PASSWORD": Interpolate(
1494 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1495 )
1496 },
1497 logEnviron=False,
1498 haltOnFailure=False,
1499 flunkOnFailure=False,
1500 warnOnFailure=False,
1501 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1502 )
1503 )
1504
1505 # build list of files to upload
1506 factory.addStep(
1507 FileDownload(
1508 name="dlsha2rsyncpl",
1509 mastersrc=scripts_dir + "/sha2rsync.pl",
1510 workerdest="../sha2rsync.pl",
1511 mode=0o755,
1512 )
1513 )
1514
1515 factory.addStep(
1516 ShellCommand(
1517 name="buildlist",
1518 description="Building list of files to upload",
1519 descriptionDone="List of files to upload built",
1520 command=[
1521 "../sha2rsync.pl",
1522 "target-sha256sums",
1523 Interpolate(
1524 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums",
1525 target=target,
1526 subtarget=subtarget,
1527 ),
1528 "rsynclist",
1529 ],
1530 haltOnFailure=True,
1531 )
1532 )
1533
1534 factory.addStep(
1535 FileDownload(
1536 name="dlrsync.sh",
1537 mastersrc=scripts_dir + "/rsync.sh",
1538 workerdest="../rsync.sh",
1539 mode=0o755,
1540 )
1541 )
1542
1543 # upload new files and update existing ones
1544 factory.addStep(
1545 ShellCommand(
1546 name="targetupload",
1547 description="Uploading target files",
1548 descriptionDone="Target files uploaded",
1549 command=[
1550 "../rsync.sh",
1551 "--exclude=/kmods/",
1552 "--files-from=rsynclist",
1553 "--delay-updates",
1554 "--partial-dir=.~tmp~%s~%s" % (target, subtarget),
1555 ]
1556 + rsync_defopts
1557 + [
1558 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1559 Interpolate(
1560 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
1561 target=target,
1562 subtarget=subtarget,
1563 ),
1564 Interpolate(
1565 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
1566 url=GetRsyncParams.withArgs("bin", "url"),
1567 target=target,
1568 subtarget=subtarget,
1569 prefix=GetVersionPrefix,
1570 ),
1571 ],
1572 env={
1573 "RSYNC_PASSWORD": Interpolate(
1574 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1575 )
1576 },
1577 haltOnFailure=True,
1578 logEnviron=False,
1579 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1580 )
1581 )
1582
1583 # delete files which don't exist locally
1584 factory.addStep(
1585 ShellCommand(
1586 name="targetprune",
1587 description="Pruning target files",
1588 descriptionDone="Target files pruned",
1589 command=[
1590 "../rsync.sh",
1591 "--exclude=/kmods/",
1592 "--delete",
1593 "--existing",
1594 "--ignore-existing",
1595 "--delay-updates",
1596 "--partial-dir=.~tmp~%s~%s" % (target, subtarget),
1597 ]
1598 + rsync_defopts
1599 + [
1600 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1601 Interpolate(
1602 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
1603 target=target,
1604 subtarget=subtarget,
1605 ),
1606 Interpolate(
1607 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
1608 url=GetRsyncParams.withArgs("bin", "url"),
1609 target=target,
1610 subtarget=subtarget,
1611 prefix=GetVersionPrefix,
1612 ),
1613 ],
1614 env={
1615 "RSYNC_PASSWORD": Interpolate(
1616 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1617 )
1618 },
1619 haltOnFailure=True,
1620 logEnviron=False,
1621 locks=NetLockUl,
1622 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1623 )
1624 )
1625
1626 factory.addStep(
1627 ShellCommand(
1628 name="kmodupload",
1629 description="Uploading kmod archive",
1630 descriptionDone="Kmod archive uploaded",
1631 command=[
1632 "../rsync.sh",
1633 "--delete",
1634 "--delay-updates",
1635 "--partial-dir=.~tmp~%s~%s" % (target, subtarget),
1636 ]
1637 + rsync_defopts
1638 + [
1639 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1640 Interpolate(
1641 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1642 target=target,
1643 subtarget=subtarget,
1644 ),
1645 Interpolate(
1646 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/",
1647 url=GetRsyncParams.withArgs("bin", "url"),
1648 target=target,
1649 subtarget=subtarget,
1650 prefix=GetVersionPrefix,
1651 ),
1652 ],
1653 env={
1654 "RSYNC_PASSWORD": Interpolate(
1655 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1656 )
1657 },
1658 haltOnFailure=True,
1659 logEnviron=False,
1660 locks=NetLockUl,
1661 doStepIf=IsKmodArchiveAndRsyncEnabled,
1662 )
1663 )
1664
1665 factory.addStep(
1666 ShellCommand(
1667 name="sourcelist",
1668 description="Finding source archives to upload",
1669 descriptionDone="Source archives to upload found",
1670 command="find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1671 haltOnFailure=True,
1672 )
1673 )
1674
1675 factory.addStep(
1676 ShellCommand(
1677 name="sourceupload",
1678 description="Uploading source archives",
1679 descriptionDone="Source archives uploaded",
1680 command=[
1681 "../rsync.sh",
1682 "--files-from=sourcelist",
1683 "--size-only",
1684 "--delay-updates",
1685 ]
1686 + rsync_defopts
1687 + [
1688 Interpolate(
1689 "--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s",
1690 target=target,
1691 subtarget=subtarget,
1692 ),
1693 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1694 "dl/",
1695 Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url")),
1696 ],
1697 env={
1698 "RSYNC_PASSWORD": Interpolate(
1699 "%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")
1700 )
1701 },
1702 haltOnFailure=True,
1703 logEnviron=False,
1704 locks=NetLockUl,
1705 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1706 )
1707 )
1708
1709 factory.addStep(
1710 ShellCommand(
1711 name="df",
1712 description="Reporting disk usage",
1713 command=["df", "-h", "."],
1714 env={"LC_ALL": "C"},
1715 logEnviron=False,
1716 haltOnFailure=False,
1717 flunkOnFailure=False,
1718 warnOnFailure=False,
1719 alwaysRun=True,
1720 )
1721 )
1722
1723 factory.addStep(
1724 ShellCommand(
1725 name="du",
1726 description="Reporting estimated file space usage",
1727 command=["du", "-sh", "."],
1728 env={"LC_ALL": "C"},
1729 logEnviron=False,
1730 haltOnFailure=False,
1731 flunkOnFailure=False,
1732 warnOnFailure=False,
1733 alwaysRun=True,
1734 )
1735 )
1736
1737 factory.addStep(
1738 ShellCommand(
1739 name="ccachestat",
1740 description="Reporting ccache stats",
1741 command=["ccache", "-s"],
1742 logEnviron=False,
1743 want_stderr=False,
1744 haltOnFailure=False,
1745 flunkOnFailure=False,
1746 warnOnFailure=False,
1747 doStepIf=util.Transform(bool, Property("ccache_command")),
1748 )
1749 )
1750
1751 return factory
1752
1753
1754 for brname in branchNames:
1755 for target in targets[brname]:
1756 bldrname = brname + "_" + target
1757 c["builders"].append(
1758 BuilderConfig(
1759 name=bldrname,
1760 workernames=workerNames,
1761 factory=prepareFactory(target),
1762 tags=[
1763 brname,
1764 ],
1765 nextBuild=GetNextBuild,
1766 canStartBuild=canStartBuild,
1767 )
1768 )
1769
1770
1771 ####### STATUS TARGETS
1772
1773 # Status targets are configured below via 'www' (the web UI and its plugins)
1774 # and 'services' (reporters such as IRC bots), which receive the results of
1775 # each build.
1776
1777 if "status_bind" in inip1:
1778 c["www"] = {
1779 "port": inip1.get("status_bind"),
1780 "plugins": {"waterfall_view": True, "console_view": True, "grid_view": True},
1781 }
1782
1783 if "status_user" in inip1 and "status_password" in inip1:
1784 c["www"]["auth"] = util.UserPasswordAuth(
1785 [(inip1.get("status_user"), inip1.get("status_password"))]
1786 )
1787 c["www"]["authz"] = util.Authz(
1788 allowRules=[util.AnyControlEndpointMatcher(role="admins")],
1789 roleMatchers=[
1790 util.RolesFromUsername(
1791 roles=["admins"], usernames=[inip1.get("status_user")]
1792 )
1793 ],
1794 )
1795
1796 c["services"] = []
1797 if ini.has_section("irc"):
1798 iniirc = ini["irc"]
1799 irc_host = iniirc.get("host", None)
1800 irc_port = iniirc.getint("port", 6667)
1801 irc_chan = iniirc.get("channel", None)
1802 irc_nick = iniirc.get("nickname", None)
1803 irc_pass = iniirc.get("password", None)
1804
1805 if irc_host and irc_nick and irc_chan:
1806 irc = reporters.IRC(
1807 irc_host,
1808 irc_nick,
1809 port=irc_port,
1810 password=irc_pass,
1811 channels=[irc_chan],
1812 notify_events=["exception", "problem", "recovery"],
1813 )
1814
1815 c["services"].append(irc)
1816
1817 c["revlink"] = util.RevlinkMatch(
1818 [r"https://git.openwrt.org/openwrt/(.*).git"],
1819 r"https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s",
1820 )
1821
1822 ####### DB URL
1823
1824 c["db"] = {
1825 # This specifies what database buildbot uses to store its state. You can leave
1826 # this at its default for all but the largest installations.
1827 "db_url": "sqlite:///state.sqlite",
1828 }
1829
1830 c["buildbotNetUsageData"] = None