1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import subprocess
8 import configparser
9
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
12
13 from twisted.internet import defer
14 from twisted.python import log
15
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes.gitpoller import GitPoller
19 from buildbot.config import BuilderConfig
20 from buildbot.plugins import reporters
21 from buildbot.plugins import schedulers
22 from buildbot.plugins import steps
23 from buildbot.plugins import util
24 from buildbot.process import properties
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Interpolate
28 from buildbot.process.properties import Property
29 from buildbot.schedulers.basic import AnyBranchScheduler
30 from buildbot.schedulers.forcesched import BaseParameter
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.schedulers.forcesched import ValidationError
33 from buildbot.steps.master import MasterShellCommand
34 from buildbot.steps.shell import SetPropertyFromCommand
35 from buildbot.steps.shell import ShellCommand
36 from buildbot.steps.source.git import Git
37 from buildbot.steps.transfer import FileDownload
38 from buildbot.steps.transfer import FileUpload
39 from buildbot.steps.transfer import StringDownload
40 from buildbot.worker import Worker
41 from buildbot.worker.local import LocalWorker
42
43
44 if not os.path.exists("twistd.pid"):
45 with open("twistd.pid", "w") as pidfile:
46 pidfile.write("{}".format(os.getpid()))
47
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
50
51 ini = configparser.ConfigParser()
52 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
53
54 if "general" not in ini or "phase1" not in ini:
55 raise ValueError("Fix your configuration")
56
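# An illustrative config.ini skeleton (placeholder values, not a real setup)
# showing the top-level sections and keys consumed below:
#
#   [general]
#   title = Example Project
#   title_url = https://example.org/
#   workdir = /data/buildbot
#
#   [phase1]
#   buildbot_url = https://buildbot.example.org/
#   status_bind = tcp:8010:interface=127.0.0.1
#   status_user = admin
#   status_password = secret
#   port = 9989
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git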
57 inip1 = ini["phase1"]
58
59 # Globals
60 work_dir = os.path.abspath(ini["general"].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
62
63 repo_url = ini["repo"].get("url")
64
65 rsync_defopts = ["-v", "--timeout=120"]
66
67 # if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
68 # rsync_bin_defopts += ["--contimeout=20"]
69
70 branches = {}
71
72
73 def ini_parse_branch(section):
74 b = {}
75 name = section.get("name")
76
77 if not name:
78 raise ValueError("missing 'name' in " + repr(section))
79 if name in branches:
80 raise ValueError("duplicate branch name in " + repr(section))
81
82 b["name"] = name
83 b["bin_url"] = section.get("binary_url")
84 b["bin_key"] = section.get("binary_password")
85
86 b["src_url"] = section.get("source_url")
87 b["src_key"] = section.get("source_password")
88
89 b["gpg_key"] = section.get("gpg_key")
90
91 b["usign_key"] = section.get("usign_key")
92 usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
93 b["usign_comment"] = section.get("usign_comment", usign_comment)
94
95 b["config_seed"] = section.get("config_seed")
96
97 b["kmod_archive"] = section.getboolean("kmod_archive", False)
98
99 branches[name] = b
100 log.msg("Configured branch: {}".format(name))
101
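# An illustrative "branch" section matching the keys read above (placeholder
# values; usign_key is the base64-encoded signing key material in practice):
#
#   [branch openwrt-23.05]
#   name = openwrt-23.05
#   binary_url = rsync://user@mirror.example.org/bin-23.05
#   binary_password = secret
#   source_url = rsync://user@mirror.example.org/src
#   source_password = secret
#   usign_key = <base64 usign secret key>
#   config_seed = CONFIG_DEVEL=y
#   kmod_archive = true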
102
103 # PB port can be either a numeric port or a connection string
104 pb_port = inip1.get("port") or 9989
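# e.g. a plain 9989, or a Twisted endpoint string such as "tcp:9989:interface=0.0.0.0"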
105
106 # This is the dictionary that the buildmaster pays attention to. We also use
107 # a shorter alias to save typing.
108 c = BuildmasterConfig = {}
109
110 ####### PROJECT IDENTITY
111
112 # the 'title' string will appear at the top of this buildbot
113 # installation's web UI home page (linked to the 'titleURL')
114 # and is embedded in the page titles.
115
116 c["title"] = ini["general"].get("title")
117 c["titleURL"] = ini["general"].get("title_url")
118
119 # the 'buildbotURL' string should point to the location where the buildbot's
120 # internal web server is visible. This typically uses the port number set
121 # in the 'www' configuration below ('status_bind'), but with an
122 # externally-visible host name which the buildbot cannot figure out
123 # without some help.
124
125 c["buildbotURL"] = inip1.get("buildbot_url")
126
127 ####### BUILDWORKERS
128
129 # The 'workers' list defines the set of recognized workers. Each element is
130 # a Worker object, specifying a unique worker name and password. The same
131 # worker name and password must be configured on the worker.
132
133 c["workers"] = []
134 NetLocks = dict()
135
136
137 def ini_parse_workers(section):
138 name = section.get("name")
139 password = section.get("password")
140 phase = section.getint("phase")
141 tagonly = section.getboolean("tag_only")
142 rsyncipv4 = section.getboolean("rsync_ipv4")
143
144 if not name or not password or phase != 1:
145 log.msg("invalid worker configuration ignored: {}".format(repr(section)))
146 return
147
148 sl_props = {"tag_only": tagonly}
149 if "dl_lock" in section:
150 lockname = section.get("dl_lock")
151 sl_props["dl_lock"] = lockname
152 if lockname not in NetLocks:
153 NetLocks[lockname] = locks.MasterLock(lockname)
154 if "ul_lock" in section:
155 lockname = section.get("ul_lock")
156 sl_props["ul_lock"] = lockname
157 if lockname not in NetLocks:
158 NetLocks[lockname] = locks.MasterLock(lockname)
159 if rsyncipv4:
160 sl_props[
161 "rsync_ipv4"
162 ] = True # only set prop if required, we use '+' Interpolate substitution
163
164 log.msg("Configured worker: {}".format(name))
165 # NB: phase1 build factory requires workers to be single-build only
166 c["workers"].append(Worker(name, password, max_builds=1, properties=sl_props))
167
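# An illustrative "worker" section for the parser above (placeholder values;
# dl_lock/ul_lock name shared master locks, tag_only restricts a worker to
# tagged release builds):
#
#   [worker 1]
#   name = builder-01
#   password = secret
#   phase = 1
#   tag_only = false
#   dl_lock = dl-mirror-1
#   ul_lock = ul-mirror-1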
168
169 for section in ini.sections():
170 if section.startswith("branch "):
171 ini_parse_branch(ini[section])
172
173 if section.startswith("worker "):
174 ini_parse_workers(ini[section])
175
176 # list of branches in build-priority order
177 branchNames = [branches[b]["name"] for b in branches]
178
179 c["protocols"] = {"pb": {"port": pb_port}}
180
181 # coalesce builds
182 c["collapseRequests"] = True
183
184 # Reduce amount of backlog data
185 c["configurators"] = [
186 util.JanitorConfigurator(
187 logHorizon=timedelta(days=3),
188 hour=6,
189 )
190 ]
191
192
193 @defer.inlineCallbacks
194 def getNewestCompleteTime(bldr):
195 """Returns the complete_at of the latest completed and not SKIPPED
196 build request for this builder, or None if there are no such build
197 requests. We need to filter out SKIPPED requests because we're
198 using collapseRequests=True, which unfortunately marks all
199 previous requests as complete when a new buildset is created.
200
201 @returns: datetime instance or None, via Deferred
202 """
203
204 bldrid = yield bldr.getBuilderId()
205 completed = yield bldr.master.data.get(
206 ("builders", bldrid, "buildrequests"),
207 [
208 resultspec.Filter("complete", "eq", [True]),
209 resultspec.Filter("results", "ne", [results.SKIPPED]),
210 ],
211 order=["-complete_at"],
212 limit=1,
213 )
214 if not completed:
215 return
216
217 complete_at = completed[0]["complete_at"]
218
219 last_build = yield bldr.master.data.get(
220 ("builds",),
221 [
222 resultspec.Filter("builderid", "eq", [bldrid]),
223 ],
224 order=["-started_at"],
225 limit=1,
226 )
227
228 if last_build and last_build[0]:
229 last_complete_at = last_build[0]["complete_at"]
230 if last_complete_at and (last_complete_at > complete_at):
231 return last_complete_at
232
233 return complete_at
234
235
236 @defer.inlineCallbacks
237 def prioritizeBuilders(master, builders):
238 """Returns sorted list of builders by their last timestamp of completed and
239 not skipped build, ordered first by branch name.
240
241 @returns: list of sorted builders
242 """
243
244 bldrNamePrio = {"__Janitor": 0, "00_force_build": 0}
245 i = 1
246 for bname in branchNames:
247 bldrNamePrio[bname] = i
248 i += 1
249
250 def is_building(bldr):
251 return bool(bldr.building) or bool(bldr.old_building)
252
253 def bldr_info(bldr):
254 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
255 d.addCallback(lambda complete_at: (complete_at, bldr))
256 return d
257
258 def bldr_sort(item):
259 (complete_at, bldr) = item
260
261 pos = 99
262 for name, prio in bldrNamePrio.items():
263 if bldr.name.startswith(name):
264 pos = prio
265 break
266
267 if not complete_at:
268 date = datetime.min
269 complete_at = date.replace(tzinfo=tzutc())
270
271 if is_building(bldr):
272 date = datetime.max
273 complete_at = date.replace(tzinfo=tzutc())
274
275 return (pos, complete_at, bldr.name)
276
277 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
278 results.sort(key=bldr_sort)
279
280 # for r in results:
281 # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
282
283 return [r[1] for r in results]
284
285
286 c["prioritizeBuilders"] = prioritizeBuilders
287
288 ####### CHANGESOURCES
289
290 # find targets
291 targets = dict()
292
293
294 def populateTargets():
295 """fetch a shallow clone of each configured branch in turn:
296 execute dump-target-info.pl and collate the results to ensure
297 targets that only exist in specific branches get built.
298 This takes a while during master startup but is executed only once.
299 """
300 sourcegit = work_dir + "/source.git"
301 for branch in branchNames:
302 log.msg(f"Populating targets for {branch}, this will take time")
303
304 if os.path.isdir(sourcegit):
305 subprocess.call(["rm", "-rf", sourcegit])
306
307 subprocess.call(
308 [
309 "git",
310 "clone",
311 "-q",
312 "--depth=1",
313 "--branch=" + branch,
314 repo_url,
315 sourcegit,
316 ]
317 )
318
319 os.makedirs(sourcegit + "/tmp", exist_ok=True)
320 findtargets = subprocess.Popen(
321 ["./scripts/dump-target-info.pl", "targets"],
322 stdout=subprocess.PIPE,
323 stderr=subprocess.DEVNULL,
324 cwd=sourcegit,
325 )
326
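# dump-target-info.pl prints one line per target; the first whitespace-separated
# field is the "target/subtarget" identifier, which is all we collect here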
327 targets[branch] = set()
328 while True:
329 line = findtargets.stdout.readline()
330 if not line:
331 break
332 ta = line.decode().strip().split(" ")
333 targets[branch].add(ta[0])
334
335 subprocess.call(["rm", "-rf", sourcegit])
336
337
338 populateTargets()
339
340 # the 'change_source' setting tells the buildmaster how it should find out
341 # about source code changes.
342
343 c["change_source"] = []
344 c["change_source"].append(
345 GitPoller(
346 repo_url,
347 workdir=work_dir + "/work.git",
348 branches=branchNames,
349 pollAtLaunch=True,
350 pollinterval=300,
351 )
352 )
353
354 ####### SCHEDULERS
355
356 # Configure the Schedulers, which decide how to react to incoming changes.
357
358
359 # Selector for known valid tags
360 class TagChoiceParameter(BaseParameter):
361 spec_attributes = ["strict", "choices"]
362 type = "list"
363 strict = True
364
365 def __init__(self, name, label=None, **kw):
366 super().__init__(name, label, **kw)
367 self._choice_list = []
368
369 def getRevTags(self, findtag=None):
370 taglist = []
371 branchvers = []
372
373 # we will filter out tags that do not match the configured branches
374 for b in branchNames:
375 basever = re.search(r"-([0-9]+\.[0-9]+)$", b)
376 if basever:
377 branchvers.append(basever[1])
378
379 # grab tags from remote repository
380 alltags = subprocess.Popen(
381 ["git", "ls-remote", "--tags", repo_url], stdout=subprocess.PIPE
382 )
383
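# each ls-remote line has the form "<sha1> <TAB> refs/tags/<name>"; peeled
# entries ("<name>^{}") do not match the anchored regex below and are skipped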
384 while True:
385 line = alltags.stdout.readline()
386
387 if not line:
388 break
389
390 (rev, tag) = line.split()
391
392 # does it match known format? ('vNN.NN.NN(-rcN)')
393 tagver = re.search(
394 r"\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$",
395 tag.decode().strip(),
396 )
397
398 # only list valid tags matching configured branches
399 if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
400 # if we want a specific tag, ignore all that don't match
401 if findtag and findtag != tagver[1]:
402 continue
403 taglist.append({"rev": rev.decode().strip(), "tag": tagver[1]})
404
405 return taglist
406
407 @property
408 def choices(self):
409 taglist = [rt["tag"] for rt in self.getRevTags()]
410 taglist.sort(
411 reverse=True,
412 key=lambda tag: tag if re.search(r"-rc[0-9]+$", tag) else tag + "-z",
413 )
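# the "-z" suffix makes a final release sort ahead of its -rc candidates in
# this reversed lexicographic ordering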
414 taglist.insert(0, "")
415
416 self._choice_list = taglist
417
418 return self._choice_list
419
420 def updateFromKwargs(self, properties, kwargs, **unused):
421 tag = self.getFromKwargs(kwargs)
422 properties[self.name] = tag
423
424 # find the commit matching the tag
425 findtag = self.getRevTags(tag)
426
427 if not findtag:
428 raise ValidationError("Couldn't find tag")
429
430 properties["force_revision"] = findtag[0]["rev"]
431
432 # find the branch matching the tag
433 branch = None
434 branchver = re.search(r"v([0-9]+\.[0-9]+)", tag)
435 for b in branchNames:
436 if b.endswith(branchver[1]):
437 branch = b
438
439 if not branch:
440 raise ValidationError("Couldn't find branch")
441
442 properties["force_branch"] = branch
443
444 def parse_from_arg(self, s):
445 if self.strict and s not in self._choice_list:
446 raise ValidationError(
447 "'%s' does not belong to list of available choices '%s'"
448 % (s, self._choice_list)
449 )
450 return s
451
452
453 @util.renderer
454 @defer.inlineCallbacks
455 def builderNames(props):
456 """since we have per branch and per target builders,
457 address the relevant builder for each new buildrequest
458 based on the request's desired branch and target.
459 """
460 branch = props.getProperty("branch")
461 target = props.getProperty("target", "")
462
463 if target == "all":
464 target = ""
465
466 # if the branch property is not set, fall back to the sourcestamp to find a branch
467 if not branch:
468 # match builders with target branch
469 ss = props.sourcestamps[0]
470 if ss:
471 branch = ss["branch"]
472 else:
473 log.msg("couldn't find builder")
474 return [] # nothing works
475
476 bname = branch + "_" + target
477 builders = []
478
479 for b in (yield props.master.data.get(("builders",))):
480 if not b["name"].startswith(bname):
481 continue
482 builders.append(b["name"])
483
484 return builders
485
486
487 c["schedulers"] = []
488 c["schedulers"].append(
489 AnyBranchScheduler(
490 name="all",
491 change_filter=util.ChangeFilter(branch=branchNames),
492 treeStableTimer=15 * 60,
493 builderNames=builderNames,
494 )
495 )
496
497 c["schedulers"].append(
498 ForceScheduler(
499 name="force",
500 buttonName="Force builds",
501 label="Force build details",
502 builderNames=["00_force_build"],
503 codebases=[
504 util.CodebaseParameter(
505 "",
506 label="Repository",
507 branch=util.FixedParameter(name="branch", default=""),
508 revision=util.FixedParameter(name="revision", default=""),
509 repository=util.FixedParameter(name="repository", default=""),
510 project=util.FixedParameter(name="project", default=""),
511 )
512 ],
513 reason=util.StringParameter(
514 name="reason",
515 label="Reason",
516 default="Trigger build",
517 required=True,
518 size=80,
519 ),
520 properties=[
521 # NB: avoid nesting to simplify processing of properties
522 util.ChoiceStringParameter(
523 name="target",
524 label="Build target",
525 default="all",
526 choices=["all"] + [t for b in branchNames for t in targets[b]],
527 ),
528 TagChoiceParameter(name="tag", label="Build tag", default=""),
529 ],
530 )
531 )
532
533 c["schedulers"].append(
534 schedulers.Triggerable(name="trigger", builderNames=builderNames)
535 )
536
537 ####### BUILDERS
538
539 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
540 # what steps, and which workers can execute them. Note that any particular build will
541 # only take place on one worker.
542
543
544 def IsNoMasterBuild(step):
545 return step.getProperty("branch") != "master"
546
547
548 def IsUsignEnabled(step):
549 branch = step.getProperty("branch")
550 return branch and branches[branch].get("usign_key")
551
552
553 def IsSignEnabled(step):
554 branch = step.getProperty("branch")
555 return IsUsignEnabled(step) or branch and branches[branch].get("gpg_key")
556
557
558 def IsKmodArchiveEnabled(step):
559 branch = step.getProperty("branch")
560 return branch and branches[branch].get("kmod_archive")
561
562
563 def IsKmodArchiveAndRsyncEnabled(step):
564 branch = step.getProperty("branch")
565 return bool(IsKmodArchiveEnabled(step) and branches[branch].get("bin_url"))
566
567
568 def GetBaseVersion(branch):
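# a branch named "<something>-NN.NN" yields "NN.NN"; any other branch is treated as master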
569 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
570 return branch.split("-")[1]
571 else:
572 return "master"
573
574
575 @properties.renderer
576 def GetVersionPrefix(props):
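# e.g. a "vNN.NN.NN" tag yields "NN.NN.NN/", a release branch yields
# "NN.NN-SNAPSHOT/", and master yields an empty prefix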
577 branch = props.getProperty("branch")
578 basever = GetBaseVersion(branch)
579 if props.hasProperty("tag") and re.match(
580 r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]
581 ):
582 return "%s/" % props["tag"][1:]
583 elif basever != "master":
584 return "%s-SNAPSHOT/" % basever
585 else:
586 return ""
587
588
589 @util.renderer
590 def GetConfigSeed(props):
591 branch = props.getProperty("branch")
592 return branch and branches[branch].get("config_seed") or ""
593
594
595 @util.renderer
596 def GetRsyncParams(props, srcorbin, urlorkey):
597 # srcorbin: 'bin' or 'src'; urlorkey: 'url' or 'key'
598 branch = props.getProperty("branch")
599 opt = srcorbin + "_" + urlorkey
600 return branch and branches[branch].get(opt)
601
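# e.g. GetRsyncParams.withArgs("bin", "url") resolves to the branch's bin_url
# setting (the "binary_url" key of its config section)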
602
603 @util.renderer
604 def GetUsignKey(props):
605 branch = props.getProperty("branch")
606 return branch and branches[branch].get("usign_key")
607
608
609 def GetNextBuild(builder, requests):
610 for r in requests:
611 if r.properties:
612 # order tagged build first
613 if r.properties.hasProperty("tag"):
614 return r
615
616 r = requests[0]
617 # log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
618 return r
619
620
621 def MakeEnv(overrides=None, tryccache=False):
622 env = {
623 "CCC": Interpolate("%(prop:cc_command:-gcc)s"),
624 "CCXX": Interpolate("%(prop:cxx_command:-g++)s"),
625 }
626 if tryccache:
627 env["CC"] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
628 env["CXX"] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
629 env["CCACHE"] = Interpolate("%(prop:ccache_command:-)s")
630 else:
631 env["CC"] = env["CCC"]
632 env["CXX"] = env["CCXX"]
633 env["CCACHE"] = ""
634 if overrides is not None:
635 env.update(overrides)
636 return env
637
638
639 @properties.renderer
640 def NetLockDl(props, extralock=None):
641 lock = None
642 if props.hasProperty("dl_lock"):
643 lock = NetLocks[props["dl_lock"]]
644 if lock is not None:
645 return [lock.access("exclusive")]
646 else:
647 return []
648
649
650 @properties.renderer
651 def NetLockUl(props):
652 lock = None
653 if props.hasProperty("ul_lock"):
654 lock = NetLocks[props["ul_lock"]]
655 if lock is not None:
656 return [lock.access("exclusive")]
657 else:
658 return []
659
660
661 def IsTargetSelected(target):
662 def CheckTargetProperty(step):
663 selected_target = step.getProperty("target", "all")
664 if selected_target != "all" and selected_target != target:
665 return False
666 return True
667
668 return CheckTargetProperty
669
670
671 @util.renderer
672 def UsignSec2Pub(props):
673 branch = props.getProperty("branch")
674 try:
675 comment = (
676 branches[branch].get("usign_comment") or "untrusted comment: secret key"
677 )
678 seckey = branches[branch].get("usign_key")
679 seckey = base64.b64decode(seckey)
680 except Exception:
681 return None
682
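# usign/signify secret keys embed the public half: keep the algorithm tag
# (bytes 0..1), the key number (bytes 32..39) and the trailing 32-byte public
# key, and pair them with a matching "public key" comment line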
683 return "{}\n{}".format(
684 re.sub(r"\bsecret key$", "public key", comment),
685 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode(),
686 )
687
688
689 def canStartBuild(builder, wfb, request):
690 """filter out non tag requests for tag_only workers."""
691 wtagonly = wfb.worker.properties.getProperty("tag_only")
692 tag = request.properties.getProperty("tag")
693
694 if wtagonly and not tag:
695 return False
696
697 return True
698
699
700 c["builders"] = []
701
702 workerNames = []
703
704 for worker in c["workers"]:
705 workerNames.append(worker.workername)
706
707 # add a single LocalWorker to handle the forcebuild builder
708 c["workers"].append(LocalWorker("__local_force_build", max_builds=1))
709
710 force_factory = BuildFactory()
711 force_factory.addStep(
712 steps.Trigger(
713 name="trigger_build",
714 schedulerNames=["trigger"],
715 sourceStamps=[
716 {
717 "codebase": "",
718 "branch": Property("force_branch"),
719 "revision": Property("force_revision"),
720 "repository": repo_url,
721 "project": "",
722 }
723 ],
724 set_properties={
725 "reason": Property("reason"),
726 "tag": Property("tag"),
727 "target": Property("target"),
728 },
729 )
730 )
731
732 c["builders"].append(
733 BuilderConfig(
734 name="00_force_build", workername="__local_force_build", factory=force_factory
735 )
736 )
737
738
739 # NB the phase1 build factory assumes workers are single-build only
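# Outline: set up a shared workdir and compiler/ccache wrappers, check out and
# verify the sources, seed and expand .config, populate dl/, build tools,
# toolchain, kernel/kmods, packages and images, optionally assemble and sign a
# kmod archive, then rsync targets, kmods and sources to the configured mirrors.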
740 def prepareFactory(target):
741 ts = target.split("/")
742
743 factory = BuildFactory()
744
745 # setup shared work directory if required
746 factory.addStep(
747 ShellCommand(
748 name="sharedwd",
749 descriptionDone="Shared work directory set up",
750 command='test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
751 workdir=".",
752 haltOnFailure=True,
753 )
754 )
755
756 # find number of cores
757 factory.addStep(
758 SetPropertyFromCommand(
759 name="nproc",
760 property="nproc",
761 description="Finding number of CPUs",
762 command=["nproc"],
763 )
764 )
765
766 # find gcc and g++ compilers
767 factory.addStep(
768 FileDownload(
769 name="dlfindbinpl",
770 mastersrc=scripts_dir + "/findbin.pl",
771 workerdest="../findbin.pl",
772 mode=0o755,
773 )
774 )
775
776 factory.addStep(
777 SetPropertyFromCommand(
778 name="gcc",
779 property="cc_command",
780 description="Finding gcc command",
781 command=["../findbin.pl", "gcc", "", ""],
782 haltOnFailure=True,
783 )
784 )
785
786 factory.addStep(
787 SetPropertyFromCommand(
788 name="g++",
789 property="cxx_command",
790 description="Finding g++ command",
791 command=["../findbin.pl", "g++", "", ""],
792 haltOnFailure=True,
793 )
794 )
795
796 # see if ccache is available
797 factory.addStep(
798 SetPropertyFromCommand(
799 name="ccache",
800 property="ccache_command",
801 description="Testing for ccache command",
802 command=["which", "ccache"],
803 haltOnFailure=False,
804 flunkOnFailure=False,
805 warnOnFailure=False,
806 hideStepIf=lambda r, s: r == results.FAILURE,
807 )
808 )
809
810 # check out the source
811 # Git() runs:
812 # if repo doesn't exist: 'git clone repourl'
813 # method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -f -f -d -x'; both only work with mode='full'
814 # git cat-file -e <commit>
815 # git checkout -f <commit>
816 # git checkout -B <branch>
817 # git rev-parse HEAD
818 factory.addStep(
819 Git(
820 name="git",
821 repourl=repo_url,
822 mode="full",
823 method="fresh",
824 locks=NetLockDl,
825 haltOnFailure=True,
826 )
827 )
828
829 # workaround for https://github.com/openwrt/buildbot/issues/5
830 factory.addStep(
831 Git(
832 name="git me once more please",
833 repourl=repo_url,
834 mode="full",
835 method="fresh",
836 locks=NetLockDl,
837 haltOnFailure=True,
838 )
839 )
840
841 # update remote refs
842 factory.addStep(
843 ShellCommand(
844 name="fetchrefs",
845 description="Fetching Git remote refs",
846 descriptionDone="Git remote refs fetched",
847 command=[
848 "git",
849 "fetch",
850 "origin",
851 Interpolate(
852 "+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s"
853 ),
854 ],
855 haltOnFailure=True,
856 )
857 )
858
859 # getver.sh requires local branches to track upstream, otherwise version computation fails.
860 # Git() does not set tracking branches when cloning or switching, so work around this here.
861 factory.addStep(
862 ShellCommand(
863 name="trackupstream",
864 description="Setting upstream branch",
865 descriptionDone="getver.sh is happy now",
866 command=["git", "branch", "-u", Interpolate("origin/%(prop:branch)s")],
867 haltOnFailure=True,
868 )
869 )
870
871 # Verify that Git HEAD points to a tag or branch
872 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
873 factory.addStep(
874 ShellCommand(
875 name="gitverify",
876 description="Ensuring that Git HEAD is pointing to a branch or tag",
877 descriptionDone="Git HEAD is sane",
878 command='git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
879 haltOnFailure=True,
880 )
881 )
882
883 factory.addStep(
884 StringDownload(
885 name="ccachecc",
886 s='#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
887 workerdest="../ccache_cc.sh",
888 mode=0o755,
889 )
890 )
891
892 factory.addStep(
893 StringDownload(
894 name="ccachecxx",
895 s='#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
896 workerdest="../ccache_cxx.sh",
897 mode=0o755,
898 )
899 )
900
901 # feed
902 factory.addStep(
903 ShellCommand(
904 name="updatefeeds",
905 description="Updating feeds",
906 command=["./scripts/feeds", "update"],
907 env=MakeEnv(tryccache=True),
908 haltOnFailure=True,
909 locks=NetLockDl,
910 )
911 )
912
913 # feed
914 factory.addStep(
915 ShellCommand(
916 name="installfeeds",
917 description="Installing feeds",
918 command=["./scripts/feeds", "install", "-a"],
919 env=MakeEnv(tryccache=True),
920 haltOnFailure=True,
921 )
922 )
923
924 # seed config
925 factory.addStep(
926 StringDownload(
927 name="dlconfigseed",
928 s=Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
929 workerdest=".config",
930 mode=0o644,
931 )
932 )
933
934 # configure
935 factory.addStep(
936 ShellCommand(
937 name="newconfig",
938 descriptionDone=".config seeded",
939 command=Interpolate(
940 "printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config",
941 target=ts[0],
942 subtarget=ts[1],
943 usign=GetUsignKey,
944 ),
945 )
946 )
947
948 factory.addStep(
949 ShellCommand(
950 name="defconfig",
951 description="Populating .config",
952 command=["make", "defconfig"],
953 env=MakeEnv(),
954 )
955 )
956
957 # check arch - exit early if it does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
958 factory.addStep(
959 ShellCommand(
960 name="checkarch",
961 description="Checking architecture",
962 descriptionDone="Architecture validated",
963 command='grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config'
964 % (ts[0], ts[1]),
965 logEnviron=False,
966 want_stdout=False,
967 want_stderr=False,
968 haltOnFailure=True,
969 flunkOnFailure=False, # this is not a build FAILURE - TODO mark build as SKIPPED
970 )
971 )
972
973 # find libc suffix
974 factory.addStep(
975 SetPropertyFromCommand(
976 name="libc",
977 property="libc",
978 description="Finding libc suffix",
979 command=[
980 "sed",
981 "-ne",
982 '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }',
983 ".config",
984 ],
985 )
986 )
987
988 # install build key
989 factory.addStep(
990 StringDownload(
991 name="dlkeybuildpub",
992 s=Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
993 workerdest="key-build.pub",
994 mode=0o600,
995 doStepIf=IsUsignEnabled,
996 )
997 )
998
999 factory.addStep(
1000 StringDownload(
1001 name="dlkeybuild",
1002 s="# fake private key",
1003 workerdest="key-build",
1004 mode=0o600,
1005 doStepIf=IsUsignEnabled,
1006 )
1007 )
1008
1009 factory.addStep(
1010 StringDownload(
1011 name="dlkeybuilducert",
1012 s="# fake certificate",
1013 workerdest="key-build.ucert",
1014 mode=0o600,
1015 doStepIf=IsUsignEnabled,
1016 )
1017 )
1018
1019 # prepare dl
1020 factory.addStep(
1021 ShellCommand(
1022 name="dldir",
1023 description="Preparing dl/",
1024 descriptionDone="dl/ prepared",
1025 command='mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
1026 workdir=Property("builddir"),
1027 logEnviron=False,
1028 want_stdout=False,
1029 )
1030 )
1031
1032 # cleanup dl
1033 factory.addStep(
1034 ShellCommand(
1035 name="dlprune",
1036 description="Pruning dl/",
1037 descriptionDone="dl/ pruned",
1038 command="find dl/ -mindepth 1 -atime +15 -delete -print",
1039 logEnviron=False,
1040 )
1041 )
1042
1043 # prepare tar
1044 factory.addStep(
1045 ShellCommand(
1046 name="dltar",
1047 description="Building and installing GNU tar",
1048 descriptionDone="GNU tar built and installed",
1049 command=[
1050 "make",
1051 Interpolate("-j%(prop:nproc:-1)s"),
1052 "tools/tar/compile",
1053 "V=s",
1054 ],
1055 env=MakeEnv(tryccache=True),
1056 haltOnFailure=True,
1057 )
1058 )
1059
1060 # populate dl
1061 factory.addStep(
1062 ShellCommand(
1063 name="dlrun",
1064 description="Populating dl/",
1065 descriptionDone="dl/ populated",
1066 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
1067 env=MakeEnv(),
1068 logEnviron=False,
1069 locks=NetLockDl,
1070 )
1071 )
1072
1073 factory.addStep(
1074 ShellCommand(
1075 name="cleanbase",
1076 description="Cleaning base-files",
1077 command=["make", "package/base-files/clean", "V=s"],
1078 )
1079 )
1080
1081 # build
1082 factory.addStep(
1083 ShellCommand(
1084 name="tools",
1085 description="Building and installing tools",
1086 descriptionDone="Tools built and installed",
1087 command=[
1088 "make",
1089 Interpolate("-j%(prop:nproc:-1)s"),
1090 "tools/install",
1091 "V=s",
1092 ],
1093 env=MakeEnv(tryccache=True),
1094 haltOnFailure=True,
1095 )
1096 )
1097
1098 factory.addStep(
1099 ShellCommand(
1100 name="toolchain",
1101 description="Building and installing toolchain",
1102 descriptionDone="Toolchain built and installed",
1103 command=[
1104 "make",
1105 Interpolate("-j%(prop:nproc:-1)s"),
1106 "toolchain/install",
1107 "V=s",
1108 ],
1109 env=MakeEnv(),
1110 haltOnFailure=True,
1111 )
1112 )
1113
1114 factory.addStep(
1115 ShellCommand(
1116 name="kmods",
1117 description="Building kmods",
1118 descriptionDone="Kmods built",
1119 command=[
1120 "make",
1121 Interpolate("-j%(prop:nproc:-1)s"),
1122 "target/compile",
1123 "V=s",
1124 "IGNORE_ERRORS=n m",
1125 "BUILD_LOG=1",
1126 ],
1127 env=MakeEnv(),
1128 haltOnFailure=True,
1129 )
1130 )
1131
1132 # find kernel version
1133 factory.addStep(
1134 SetPropertyFromCommand(
1135 name="kernelversion",
1136 property="kernelversion",
1137 description="Finding the effective Kernel version",
1138 command="make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
1139 env={"TOPDIR": Interpolate("%(prop:builddir)s/build")},
1140 )
1141 )
1142
1143 factory.addStep(
1144 ShellCommand(
1145 name="pkgclean",
1146 description="Cleaning up package build",
1147 descriptionDone="Package build cleaned up",
1148 command=["make", "package/cleanup", "V=s"],
1149 )
1150 )
1151
1152 factory.addStep(
1153 ShellCommand(
1154 name="pkgbuild",
1155 description="Building packages",
1156 descriptionDone="Packages built",
1157 command=[
1158 "make",
1159 Interpolate("-j%(prop:nproc:-1)s"),
1160 "package/compile",
1161 "V=s",
1162 "IGNORE_ERRORS=n m",
1163 "BUILD_LOG=1",
1164 ],
1165 env=MakeEnv(),
1166 haltOnFailure=True,
1167 )
1168 )
1169
1170 factory.addStep(
1171 ShellCommand(
1172 name="pkginstall",
1173 description="Installing packages",
1174 descriptionDone="Packages installed",
1175 command=[
1176 "make",
1177 Interpolate("-j%(prop:nproc:-1)s"),
1178 "package/install",
1179 "V=s",
1180 ],
1181 env=MakeEnv(),
1182 haltOnFailure=True,
1183 )
1184 )
1185
1186 factory.addStep(
1187 ShellCommand(
1188 name="pkgindex",
1189 description="Indexing packages",
1190 descriptionDone="Packages indexed",
1191 command=[
1192 "make",
1193 Interpolate("-j%(prop:nproc:-1)s"),
1194 "package/index",
1195 "V=s",
1196 "CONFIG_SIGNED_PACKAGES=",
1197 ],
1198 env=MakeEnv(),
1199 haltOnFailure=True,
1200 )
1201 )
1202
1203 factory.addStep(
1204 ShellCommand(
1205 name="images",
1206 description="Building and installing images",
1207 descriptionDone="Images built and installed",
1208 command=[
1209 "make",
1210 Interpolate("-j%(prop:nproc:-1)s"),
1211 "target/install",
1212 "V=s",
1213 ],
1214 env=MakeEnv(),
1215 haltOnFailure=True,
1216 )
1217 )
1218
1219 factory.addStep(
1220 ShellCommand(
1221 name="buildinfo",
1222 description="Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
1223 command="make -j1 buildinfo V=s || true",
1224 env=MakeEnv(),
1225 haltOnFailure=True,
1226 )
1227 )
1228
1229 factory.addStep(
1230 ShellCommand(
1231 name="json_overview_image_info",
1232 description="Generating profiles.json in target folder",
1233 command="make -j1 json_overview_image_info V=s || true",
1234 env=MakeEnv(),
1235 haltOnFailure=True,
1236 )
1237 )
1238
1239 factory.addStep(
1240 ShellCommand(
1241 name="checksums",
1242 description="Calculating checksums",
1243 descriptionDone="Checksums calculated",
1244 command=["make", "-j1", "checksum", "V=s"],
1245 env=MakeEnv(),
1246 haltOnFailure=True,
1247 )
1248 )
1249
1250 factory.addStep(
1251 ShellCommand(
1252 name="kmoddir",
1253 descriptionDone="Kmod directory created",
1254 command=[
1255 "mkdir",
1256 "-p",
1257 Interpolate(
1258 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s",
1259 target=ts[0],
1260 subtarget=ts[1],
1261 ),
1262 ],
1263 haltOnFailure=True,
1264 doStepIf=IsKmodArchiveEnabled,
1265 )
1266 )
1267
1268 factory.addStep(
1269 ShellCommand(
1270 name="kmodprepare",
1271 description="Preparing kmod archive",
1272 descriptionDone="Kmod archive prepared",
1273 command=[
1274 "rsync",
1275 "--include=/kmod-*.ipk",
1276 "--exclude=*",
1277 "-va",
1278 Interpolate(
1279 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/",
1280 target=ts[0],
1281 subtarget=ts[1],
1282 ),
1283 Interpolate(
1284 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1285 target=ts[0],
1286 subtarget=ts[1],
1287 ),
1288 ],
1289 haltOnFailure=True,
1290 doStepIf=IsKmodArchiveEnabled,
1291 )
1292 )
1293
1294 factory.addStep(
1295 ShellCommand(
1296 name="kmodindex",
1297 description="Indexing kmod archive",
1298 descriptionDone="Kmod archive indexed",
1299 command=[
1300 "make",
1301 Interpolate("-j%(prop:nproc:-1)s"),
1302 "package/index",
1303 "V=s",
1304 "CONFIG_SIGNED_PACKAGES=",
1305 Interpolate(
1306 "PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1307 target=ts[0],
1308 subtarget=ts[1],
1309 ),
1310 ],
1311 env=MakeEnv(),
1312 haltOnFailure=True,
1313 doStepIf=IsKmodArchiveEnabled,
1314 )
1315 )
1316
1317 # sign
1318 factory.addStep(
1319 MasterShellCommand(
1320 name="signprepare",
1321 descriptionDone="Temporary signing directory prepared",
1322 command=["mkdir", "-p", "%s/signing" % (work_dir)],
1323 haltOnFailure=True,
1324 doStepIf=IsSignEnabled,
1325 )
1326 )
1327
1328 factory.addStep(
1329 ShellCommand(
1330 name="signpack",
1331 description="Packing files to sign",
1332 descriptionDone="Files to sign packed",
1333 command=Interpolate(
1334 "find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz",
1335 target=ts[0],
1336 subtarget=ts[1],
1337 ),
1338 haltOnFailure=True,
1339 doStepIf=IsSignEnabled,
1340 )
1341 )
1342
1343 factory.addStep(
1344 FileUpload(
1345 workersrc="sign.tar.gz",
1346 masterdest="%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]),
1347 haltOnFailure=True,
1348 doStepIf=IsSignEnabled,
1349 )
1350 )
1351
1352 factory.addStep(
1353 MasterShellCommand(
1354 name="signfiles",
1355 description="Signing files",
1356 descriptionDone="Files signed",
1357 command=[
1358 "%s/signall.sh" % (scripts_dir),
1359 "%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]),
1360 Interpolate("%(prop:branch)s"),
1361 ],
1362 env={"CONFIG_INI": os.getenv("BUILDMASTER_CONFIG", "./config.ini")},
1363 haltOnFailure=True,
1364 doStepIf=IsSignEnabled,
1365 )
1366 )
1367
1368 factory.addStep(
1369 FileDownload(
1370 name="dlsigntargz",
1371 mastersrc="%s/signing/%s.%s.tar.gz" % (work_dir, ts[0], ts[1]),
1372 workerdest="sign.tar.gz",
1373 haltOnFailure=True,
1374 doStepIf=IsSignEnabled,
1375 )
1376 )
1377
1378 factory.addStep(
1379 ShellCommand(
1380 name="signunpack",
1381 description="Unpacking signed files",
1382 descriptionDone="Signed files unpacked",
1383 command=["tar", "-xzf", "sign.tar.gz"],
1384 haltOnFailure=True,
1385 doStepIf=IsSignEnabled,
1386 )
1387 )
1388
1389 # upload
1390 factory.addStep(
1391 ShellCommand(
1392 name="dirprepare",
1393 descriptionDone="Upload directory structure prepared",
1394 command=[
1395 "mkdir",
1396 "-p",
1397 Interpolate(
1398 "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s",
1399 target=ts[0],
1400 subtarget=ts[1],
1401 prefix=GetVersionPrefix,
1402 ),
1403 ],
1404 haltOnFailure=True,
1405 )
1406 )
1407
1408 factory.addStep(
1409 ShellCommand(
1410 name="linkprepare",
1411 descriptionDone="Repository symlink prepared",
1412 command=[
1413 "ln",
1414 "-s",
1415 "-f",
1416 Interpolate(
1417 "../packages-%(kw:basever)s",
1418 basever=util.Transform(GetBaseVersion, Property("branch")),
1419 ),
1420 Interpolate(
1421 "tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix
1422 ),
1423 ],
1424 doStepIf=IsNoMasterBuild,
1425 haltOnFailure=True,
1426 )
1427 )
1428
1429 factory.addStep(
1430 ShellCommand(
1431 name="kmoddirprepare",
1432 descriptionDone="Kmod archive upload directory prepared",
1433 command=[
1434 "mkdir",
1435 "-p",
1436 Interpolate(
1437 "tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s",
1438 target=ts[0],
1439 subtarget=ts[1],
1440 prefix=GetVersionPrefix,
1441 ),
1442 ],
1443 haltOnFailure=True,
1444 doStepIf=IsKmodArchiveEnabled,
1445 )
1446 )
1447
1448 factory.addStep(
1449 ShellCommand(
1450 name="dirupload",
1451 description="Uploading directory structure",
1452 descriptionDone="Directory structure uploaded",
1453 command=["rsync", Interpolate("-az%(prop:rsync_ipv4:+4)s")]
1454 + rsync_defopts
1455 + [
1456 "tmp/upload/",
1457 Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url")),
1458 ],
1459 env={
1460 "RSYNC_PASSWORD": Interpolate(
1461 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1462 )
1463 },
1464 haltOnFailure=True,
1465 logEnviron=False,
1466 locks=NetLockUl,
1467 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1468 )
1469 )
1470
1471 # download remote sha256sums to 'target-sha256sums'
1472 factory.addStep(
1473 ShellCommand(
1474 name="target-sha256sums",
1475 description="Fetching remote sha256sums for target",
1476 descriptionDone="Remote sha256sums for target fetched",
1477 command=["rsync", Interpolate("-z%(prop:rsync_ipv4:+4)s")]
1478 + rsync_defopts
1479 + [
1480 Interpolate(
1481 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums",
1482 url=GetRsyncParams.withArgs("bin", "url"),
1483 target=ts[0],
1484 subtarget=ts[1],
1485 prefix=GetVersionPrefix,
1486 ),
1487 "target-sha256sums",
1488 ],
1489 env={
1490 "RSYNC_PASSWORD": Interpolate(
1491 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1492 )
1493 },
1494 logEnviron=False,
1495 haltOnFailure=False,
1496 flunkOnFailure=False,
1497 warnOnFailure=False,
1498 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1499 )
1500 )
1501
1502 # build list of files to upload
1503 factory.addStep(
1504 FileDownload(
1505 name="dlsha2rsyncpl",
1506 mastersrc=scripts_dir + "/sha2rsync.pl",
1507 workerdest="../sha2rsync.pl",
1508 mode=0o755,
1509 )
1510 )
1511
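# sha2rsync.pl compares the remote sha256sums fetched above with the freshly
# built ones and writes the names of new or changed files to "rsynclist",
# so the uploads below only transfer what actually differs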
1512 factory.addStep(
1513 ShellCommand(
1514 name="buildlist",
1515 description="Building list of files to upload",
1516 descriptionDone="List of files to upload built",
1517 command=[
1518 "../sha2rsync.pl",
1519 "target-sha256sums",
1520 Interpolate(
1521 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums",
1522 target=ts[0],
1523 subtarget=ts[1],
1524 ),
1525 "rsynclist",
1526 ],
1527 haltOnFailure=True,
1528 )
1529 )
1530
1531 factory.addStep(
1532 FileDownload(
1533 name="dlrsync.sh",
1534 mastersrc=scripts_dir + "/rsync.sh",
1535 workerdest="../rsync.sh",
1536 mode=0o755,
1537 )
1538 )
1539
1540 # upload new files and update existing ones
1541 factory.addStep(
1542 ShellCommand(
1543 name="targetupload",
1544 description="Uploading target files",
1545 descriptionDone="Target files uploaded",
1546 command=[
1547 "../rsync.sh",
1548 "--exclude=/kmods/",
1549 "--files-from=rsynclist",
1550 "--delay-updates",
1551 "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]),
1552 ]
1553 + rsync_defopts
1554 + [
1555 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1556 Interpolate(
1557 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
1558 target=ts[0],
1559 subtarget=ts[1],
1560 ),
1561 Interpolate(
1562 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
1563 url=GetRsyncParams.withArgs("bin", "url"),
1564 target=ts[0],
1565 subtarget=ts[1],
1566 prefix=GetVersionPrefix,
1567 ),
1568 ],
1569 env={
1570 "RSYNC_PASSWORD": Interpolate(
1571 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1572 )
1573 },
1574 haltOnFailure=True,
1575 logEnviron=False,
1576 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1577 )
1578 )
1579
1580 # delete files which don't exist locally
1581 factory.addStep(
1582 ShellCommand(
1583 name="targetprune",
1584 description="Pruning target files",
1585 descriptionDone="Target files pruned",
1586 command=[
1587 "../rsync.sh",
1588 "--exclude=/kmods/",
1589 "--delete",
1590 "--existing",
1591 "--ignore-existing",
1592 "--delay-updates",
1593 "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]),
1594 ]
1595 + rsync_defopts
1596 + [
1597 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1598 Interpolate(
1599 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/",
1600 target=ts[0],
1601 subtarget=ts[1],
1602 ),
1603 Interpolate(
1604 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/",
1605 url=GetRsyncParams.withArgs("bin", "url"),
1606 target=ts[0],
1607 subtarget=ts[1],
1608 prefix=GetVersionPrefix,
1609 ),
1610 ],
1611 env={
1612 "RSYNC_PASSWORD": Interpolate(
1613 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1614 )
1615 },
1616 haltOnFailure=True,
1617 logEnviron=False,
1618 locks=NetLockUl,
1619 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1620 )
1621 )
1622
1623 factory.addStep(
1624 ShellCommand(
1625 name="kmodupload",
1626 description="Uploading kmod archive",
1627 descriptionDone="Kmod archive uploaded",
1628 command=[
1629 "../rsync.sh",
1630 "--delete",
1631 "--delay-updates",
1632 "--partial-dir=.~tmp~%s~%s" % (ts[0], ts[1]),
1633 ]
1634 + rsync_defopts
1635 + [
1636 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1637 Interpolate(
1638 "bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/",
1639 target=ts[0],
1640 subtarget=ts[1],
1641 ),
1642 Interpolate(
1643 "%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/",
1644 url=GetRsyncParams.withArgs("bin", "url"),
1645 target=ts[0],
1646 subtarget=ts[1],
1647 prefix=GetVersionPrefix,
1648 ),
1649 ],
1650 env={
1651 "RSYNC_PASSWORD": Interpolate(
1652 "%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")
1653 )
1654 },
1655 haltOnFailure=True,
1656 logEnviron=False,
1657 locks=NetLockUl,
1658 doStepIf=IsKmodArchiveAndRsyncEnabled,
1659 )
1660 )
1661
1662 factory.addStep(
1663 ShellCommand(
1664 name="sourcelist",
1665 description="Finding source archives to upload",
1666 descriptionDone="Source archives to upload found",
1667 command="find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1668 haltOnFailure=True,
1669 )
1670 )
1671
1672 factory.addStep(
1673 ShellCommand(
1674 name="sourceupload",
1675 description="Uploading source archives",
1676 descriptionDone="Source archives uploaded",
1677 command=[
1678 "../rsync.sh",
1679 "--files-from=sourcelist",
1680 "--size-only",
1681 "--delay-updates",
1682 ]
1683 + rsync_defopts
1684 + [
1685 Interpolate(
1686 "--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s",
1687 target=ts[0],
1688 subtarget=ts[1],
1689 ),
1690 Interpolate("-a%(prop:rsync_ipv4:+4)s"),
1691 "dl/",
1692 Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url")),
1693 ],
1694 env={
1695 "RSYNC_PASSWORD": Interpolate(
1696 "%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")
1697 )
1698 },
1699 haltOnFailure=True,
1700 logEnviron=False,
1701 locks=NetLockUl,
1702 doStepIf=util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
1703 )
1704 )
1705
1706 factory.addStep(
1707 ShellCommand(
1708 name="df",
1709 description="Reporting disk usage",
1710 command=["df", "-h", "."],
1711 env={"LC_ALL": "C"},
1712 logEnviron=False,
1713 haltOnFailure=False,
1714 flunkOnFailure=False,
1715 warnOnFailure=False,
1716 alwaysRun=True,
1717 )
1718 )
1719
1720 factory.addStep(
1721 ShellCommand(
1722 name="du",
1723 description="Reporting estimated file space usage",
1724 command=["du", "-sh", "."],
1725 env={"LC_ALL": "C"},
1726 logEnviron=False,
1727 haltOnFailure=False,
1728 flunkOnFailure=False,
1729 warnOnFailure=False,
1730 alwaysRun=True,
1731 )
1732 )
1733
1734 factory.addStep(
1735 ShellCommand(
1736 name="ccachestat",
1737 description="Reporting ccache stats",
1738 command=["ccache", "-s"],
1739 logEnviron=False,
1740 want_stderr=False,
1741 haltOnFailure=False,
1742 flunkOnFailure=False,
1743 warnOnFailure=False,
1744 doStepIf=util.Transform(bool, Property("ccache_command")),
1745 )
1746 )
1747
1748 return factory
1749
1750
1751 for brname in branchNames:
1752 for target in targets[brname]:
1753 bldrname = brname + "_" + target
1754 c["builders"].append(
1755 BuilderConfig(
1756 name=bldrname,
1757 workernames=workerNames,
1758 factory=prepareFactory(target),
1759 tags=[
1760 brname,
1761 ],
1762 nextBuild=GetNextBuild,
1763 canStartBuild=canStartBuild,
1764 )
1765 )
1766
1767
1768 ####### STATUS TARGETS
1769
1770 # Build results are exposed through the web UI configured in c['www'] below
1771 # and pushed to any reporters registered in c['services'], such as the IRC
1772 # bot configured from the optional [irc] section.
1773
1774 if "status_bind" in inip1:
1775 c["www"] = {
1776 "port": inip1.get("status_bind"),
1777 "plugins": {"waterfall_view": True, "console_view": True, "grid_view": True},
1778 }
1779
1780 if "status_user" in inip1 and "status_password" in inip1:
1781 c["www"]["auth"] = util.UserPasswordAuth(
1782 [(inip1.get("status_user"), inip1.get("status_password"))]
1783 )
1784 c["www"]["authz"] = util.Authz(
1785 allowRules=[util.AnyControlEndpointMatcher(role="admins")],
1786 roleMatchers=[
1787 util.RolesFromUsername(
1788 roles=["admins"], usernames=[inip1.get("status_user")]
1789 )
1790 ],
1791 )
1792
1793 c["services"] = []
1794 if ini.has_section("irc"):
1795 iniirc = ini["irc"]
1796 irc_host = iniirc.get("host", None)
1797 irc_port = iniirc.getint("port", 6667)
1798 irc_chan = iniirc.get("channel", None)
1799 irc_nick = iniirc.get("nickname", None)
1800 irc_pass = iniirc.get("password", None)
1801
1802 if irc_host and irc_nick and irc_chan:
1803 irc = reporters.IRC(
1804 irc_host,
1805 irc_nick,
1806 port=irc_port,
1807 password=irc_pass,
1808 channels=[irc_chan],
1809 notify_events=["exception", "problem", "recovery"],
1810 )
1811
1812 c["services"].append(irc)
1813
1814 c["revlink"] = util.RevlinkMatch(
1815 [r"https://git.openwrt.org/openwrt/(.*).git"],
1816 r"https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s",
1817 )
1818
1819 ####### DB URL
1820
1821 c["db"] = {
1822 # This specifies what database buildbot uses to store its state. You can leave
1823 # this at its default for all but the largest installations.
1824 "db_url": "sqlite:///state.sqlite",
1825 }
1826
1827 c["buildbotNetUsageData"] = None