2 # ex: set syntax=python:
# Standard library
import base64
import configparser
import os
import re
import subprocess
from datetime import datetime, timedelta

# Third-party
from dateutil.tz import tzutc

from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.data import resultspec
from buildbot.plugins import reporters
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import properties
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Property
from buildbot.schedulers.basic import AnyBranchScheduler
from buildbot.schedulers.forcesched import BaseParameter
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.forcesched import ValidationError
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.shell import ShellCommand
from buildbot.steps.source.git import Git
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker
from buildbot.worker.local import LocalWorker
# Record this master's PID so helper tooling can find the process; never
# clobber a pid file that is already present.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as handle:
        handle.write(str(os.getpid()))
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Load the INI configuration; BUILDMASTER_CONFIG may point elsewhere.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Both [general] and [phase1] sections are mandatory.
for required_section in ("general", "phase1"):
    if required_section not in ini:
        raise ValueError("Fix your configuration")
# Master-side working directory and location of the helper scripts checkout.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

# Git repository holding the source tree to build.
repo_url = ini['repo'].get("url")

# Options passed to every rsync invocation.
rsync_defopts = ["-v", "--timeout=120"]

#if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
# rsync_bin_defopts += ["--contimeout=20"]
def ini_parse_branch(section):
    """Parse one "branch ..." config section into the per-branch settings map.

    NOTE(review): several lines are elided in this excerpt (the guard
    conditions in front of the two raises, the creation of the settings
    dict 'b' and the final registration into the global branch map); the
    statements below are reproduced verbatim.
    """
    name = section.get("name")

    raise ValueError("missing 'name' in " + repr(section))
    raise ValueError("duplicate branch name in " + repr(section))

    # rsync destinations and credentials for binaries and sources
    b["bin_url"] = section.get("binary_url")
    b["bin_key"] = section.get("binary_password")

    b["src_url"] = section.get("source_url")
    b["src_key"] = section.get("source_password")

    b["gpg_key"] = section.get("gpg_key")

    b["usign_key"] = section.get("usign_key")
    # default usign comment derived from the branch name, e.g. "Openwrt 22.03 key"
    usign_comment = "untrusted comment: " + name.replace("-", " ").title() + " key"
    b["usign_comment"] = section.get("usign_comment", usign_comment)

    b["config_seed"] = section.get("config_seed")

    b["kmod_archive"] = section.getboolean("kmod_archive", False)

    log.msg("Configured branch: {}".format(name))
# PB port can be either a numeric port or a connection string
# NOTE(review): 'inip1' is presumably ini['phase1'], assigned on a line
# elided from this excerpt — confirm against the full file.
pb_port = inip1.get("port") or 9989

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out

c['buildbotURL'] = inip1.get("buildbot_url")
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

def ini_parse_workers(section):
    """Parse one "worker ..." config section and register a phase-1 Worker.

    NOTE(review): some lines are elided in this excerpt (presumably an early
    'return' after the invalid-configuration message and the 'if rsyncipv4:'
    guard in front of the rsync_ipv4 property); statements below are
    reproduced verbatim.
    """
    name = section.get("name")
    password = section.get("password")
    phase = section.getint("phase")
    tagonly = section.getboolean("tag_only")
    rsyncipv4 = section.getboolean("rsync_ipv4")

    # NOTE(review): 'not phase == 1' means phase != 1 — this master only
    # accepts phase-1 workers
    if not name or not password or not phase == 1:
        log.msg("invalid worker configuration ignored: {}".format(repr(section)))

    # per-worker properties: optional master-side net lock names + tag flag
    sl_props = { 'dl_lock':None, 'ul_lock':None, 'tag_only':tagonly }
    if "dl_lock" in section:
        lockname = section.get("dl_lock")
        sl_props['dl_lock'] = lockname
        if lockname not in NetLocks:
            NetLocks[lockname] = locks.MasterLock(lockname)
    if "ul_lock" in section:
        lockname = section.get("ul_lock")
        sl_props['ul_lock'] = lockname
        if lockname not in NetLocks:
            NetLocks[lockname] = locks.MasterLock(lockname)

    sl_props['rsync_ipv4'] = True # only set prop if required, we use '+' Interpolate substitution

    log.msg("Configured worker: {}".format(name))
    # NB: phase1 build factory requires workers to be single-build only
    c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
# Dispatch every "branch ..." / "worker ..." INI section to its parser.
# A section name can match at most one prefix, so elif is safe here.
for section_name in ini.sections():
    if section_name.startswith("branch "):
        ini_parse_branch(ini[section_name])
    elif section_name.startswith("worker "):
        ini_parse_workers(ini[section_name])
# list of branches in build-priority order
branchNames = [branches[b]["name"] for b in branches]

c['protocols'] = {'pb': {'port': pb_port}}

# collapse queued requests for the same builder into one build
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the remaining JanitorConfigurator arguments and the closing
# brackets of this call are elided in this excerpt.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    # NOTE(review): several lines of this function are elided in this
    # excerpt (e.g. the early return when no completed request exists and
    # parts of the data.get() argument lists); statements below are
    # reproduced verbatim.
    bldrid = yield bldr.getBuilderId()
    # most recent completed, non-skipped build request for this builder
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)

    complete_at = completed[0]['complete_at']

    # also consider the most recently started build for this builder
    last_build = yield bldr.master.data.get(
        resultspec.Filter('builderid', 'eq', [bldrid]),
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        # prefer the newer of the two timestamps
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build, ordered first by branch name.

    @returns: list of sorted builders
    """
    # NOTE(review): substantial parts of this function are elided in this
    # excerpt (the priority counter 'i', the bodies of the nested bldr_info /
    # bldr_sort helpers, fallback 'date' values); statements below are
    # reproduced verbatim.

    # builder-name prefix -> priority rank; janitor/force builders first
    bldrNamePrio = { "__Janitor": 0, "00_force_build": 0 }
    for bname in branchNames:
        bldrNamePrio[bname] = i

    def is_building(bldr):
        # a builder with an in-flight (or stale) build is considered busy
        return bool(bldr.building) or bool(bldr.old_building)

    d = defer.maybeDeferred(getNewestCompleteTime, bldr)
    d.addCallback(lambda complete_at: (complete_at, bldr))

    (complete_at, bldr) = item

    # rank the builder by the longest matching name prefix
    for (name, prio) in bldrNamePrio.items():
        if bldr.name.startswith(name):

    complete_at = date.replace(tzinfo=tzutc())

    if is_building(bldr):
        complete_at = date.replace(tzinfo=tzutc())

    return (pos, complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    # log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
####### CHANGESOURCES

def populateTargets():
    """ fetch a shallow clone of each configured branch in turn:
    execute dump-target-info.pl and collate the results to ensure
    targets that only exist in specific branches get built.
    This takes a while during master startup but is executed only once.
    """
    # NOTE(review): the loop that reads dump-target-info output into the
    # global target set is partially elided in this excerpt; statements
    # below are reproduced verbatim.
    log.msg("Populating targets, this will take time")
    sourcegit = work_dir + '/source.git'
    for branch in branchNames:
        # start from a pristine shallow clone of this branch
        if os.path.isdir(sourcegit):
            subprocess.call(["rm", "-rf", sourcegit])

        subprocess.call(["git", "clone", "-q", "--depth=1", "--branch="+branch, repo_url, sourcegit])

        os.makedirs(sourcegit + '/tmp', exist_ok=True)
        findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
            stdout = subprocess.PIPE, stderr = subprocess.DEVNULL, cwd = sourcegit)

        # each output line is "<target/subtarget> ..." — split on spaces
        line = findtargets.stdout.readline()
        ta = line.decode().strip().split(' ')

    # clean up the temporary clone once all branches were scanned
    subprocess.call(["rm", "-rf", sourcegit])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes.

c['change_source'] = []
# NOTE(review): the GitPoller's first argument (the repository URL) is on a
# line elided from this excerpt.
c['change_source'].append(GitPoller(
    workdir=work_dir+'/work.git', branches=branchNames,
    pollAtLaunch=True, pollinterval=300))
# Configure the Schedulers, which decide how to react to incoming changes.

# Selector for known valid tags
class TagChoiceParameter(BaseParameter):
    """ForceScheduler parameter offering the valid release tags of the
    remote repository as choices.

    NOTE(review): numerous lines of this class are elided in this excerpt
    (attribute defaults, loop scaffolding, 'continue'/'return' statements,
    the 'choices' property definition); the statements below are reproduced
    verbatim.
    """
    spec_attributes = ["strict", "choices"]

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # cached list of valid tag names, newest first
        self._choice_list = []

    def getRevTags(self, findtag=None):
        # we will filter out tags that do no match the configured branches
        for b in branchNames:
            # branch names like "openwrt-22.03" -> base version "22.03"
            basever = re.search(r'-([0-9]+\.[0-9]+)$', b)
            branchvers.append(basever[1])

        # grab tags from remote repository
        alltags = subprocess.Popen(
            ['git', 'ls-remote', '--tags', repo_url],
            stdout = subprocess.PIPE)

        # ls-remote output: "<sha>\trefs/tags/<tag>"
        line = alltags.stdout.readline()

        (rev, tag) = line.split()

        # does it match known format? ('vNN.NN.NN(-rcN)')
        tagver = re.search(r'\brefs/tags/(v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', tag.decode().strip())

        # only list valid tags matching configured branches
        if tagver and any(tagver[1][1:].startswith(b) for b in branchvers):
            # if we want a specific tag, ignore all that don't match
            if findtag and findtag != tagver[1]:
            taglist.append({'rev': rev.decode().strip(), 'tag': tagver[1]})

    taglist = [rt['tag'] for rt in self.getRevTags()]
    # append '-z' to final releases so they sort above their -rc tags
    taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
    taglist.insert(0, '')

    self._choice_list = taglist

    return self._choice_list

    def updateFromKwargs(self, properties, kwargs, **unused):
        tag = self.getFromKwargs(kwargs)
        properties[self.name] = tag

        # find the commit matching the tag
        findtag = self.getRevTags(tag)
        raise ValidationError("Couldn't find tag")

        properties['force_revision'] = findtag[0]['rev']

        # find the branch matching the tag
        branchver = re.search(r'v([0-9]+\.[0-9]+)', tag)
        for b in branchNames:
            if b.endswith(branchver[1]):

        raise ValidationError("Couldn't find branch")

        properties['force_branch'] = branch

    def parse_from_arg(self, s):
        # in strict mode, reject anything that was not offered as a choice
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
@defer.inlineCallbacks
def builderNames(props):
    """ since we have per branch and per target builders,
    address the relevant builder for each new buildrequest
    based on the request's desired branch and target.
    """
    # NOTE(review): several guard and return lines are elided in this
    # excerpt; statements below are reproduced verbatim.
    branch = props.getProperty("branch")
    target = props.getProperty("target", "")

    # if that didn't work, try sourcestamp to find a branch
    # match builders with target branch
    ss = props.sourcestamps[0]
    branch = ss['branch']
    log.msg("couldn't find builder")
    return [] # nothing works

    # builders are named "<branch>_<target>..."
    bname = branch + "_" + target

    # collect all builders whose name starts with the computed prefix
    for b in (yield props.master.data.get(('builders',))):
        if not b['name'].startswith(bname):
        builders.append(b['name'])
# NOTE(review): many lines of this scheduler configuration are elided in
# this excerpt (scheduler names, parameter names, closing brackets);
# statements below are reproduced verbatim.

# trigger a build on every change on a configured branch, after settling
c['schedulers'].append(AnyBranchScheduler(
    change_filter = util.ChangeFilter(branch=branchNames),
    treeStableTimer = 15*60,
    builderNames = builderNames))

# manual builds via the web UI, routed through the 00_force_build builder
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    util.CodebaseParameter(
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")

    reason = util.StringParameter(
        default = "Trigger build",

    # NB: avoid nesting to simplify processing of properties
    util.ChoiceStringParameter(
        label = "Build target",
        choices = [ "all" ] + list(targets)

# Triggerable used by the force builder to start the real per-target builds
c['schedulers'].append(schedulers.Triggerable(name="trigger", builderNames=builderNames))
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

def IsNoMasterBuild(step):
    """doStepIf predicate: true whenever the build's branch is not 'master'."""
    current_branch = step.getProperty("branch")
    return current_branch != "master"
def IsUsignEnabled(step):
    """doStepIf predicate: truthy when the build's branch has a usign
    signing key configured (returns the key itself, or a falsy value)."""
    current = step.getProperty("branch")
    if not current:
        return current
    return branches[current].get("usign_key")
def IsSignEnabled(step):
    """doStepIf predicate: truthy when the build's branch can be signed at
    all — via usign or via a configured GPG key."""
    current = step.getProperty("branch")
    usign = IsUsignEnabled(step)
    if usign:
        return usign
    return current and branches[current].get("gpg_key")
def IsKmodArchiveEnabled(step):
    """doStepIf predicate: truthy when the build's branch wants a kmod
    archive published."""
    current = step.getProperty("branch")
    if not current:
        return current
    return branches[current].get("kmod_archive")
def GetBaseVersion(branch):
    """Extract the numeric base version from a release branch name, e.g.
    "openwrt-22.03" -> "22.03".

    NOTE(review): the fallback for non-matching branch names (presumably
    returning "master") is elided in this excerpt.
    """
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", branch):
        return branch.split('-')[1]
def GetVersionPrefix(props):
    """Render the upload path prefix: "<tag>/" (without the leading 'v') for
    tagged builds, "<basever>-SNAPSHOT/" for release branches.

    NOTE(review): the decorator line and the final fallback (master builds)
    are elided in this excerpt.
    """
    branch = props.getProperty("branch")
    basever = GetBaseVersion(branch)
    # tagged release: strip the leading 'v', e.g. v22.03.5 -> 22.03.5/
    if props.hasProperty("tag") and re.match(r"^v[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"][1:]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
def GetConfigSeed(props):
    """Renderer: the branch's configured .config seed text, or "" when the
    branch is unset or has no seed."""
    current = props.getProperty("branch")
    seed = current and branches[current].get("config_seed")
    if seed:
        return seed
    return ""
def GetRsyncParams(props, srcorbin, urlorkey):
    """Renderer helper: look up an rsync setting for the build's branch.

    srcorbin is 'bin' or 'src', urlorkey is 'url' or 'key'; combined they
    name the branch option to fetch, e.g. 'bin_url'.
    """
    current = props.getProperty("branch")
    if not current:
        return current
    option_name = "{}_{}".format(srcorbin, urlorkey)
    return branches[current].get(option_name)
def GetUsignKey(props):
    """Renderer: the branch's usign secret key, or a falsy value when the
    branch is unset or has no key."""
    current = props.getProperty("branch")
    if not current:
        return current
    return branches[current].get("usign_key")
def GetNextBuild(builder, requests):
    """Pick the next build request for a builder, preferring tagged builds.

    NOTE(review): the request iteration and all return statements are
    elided in this excerpt; statements below are reproduced verbatim.
    """
    # order tagged build first
    if r.properties.hasProperty("tag"):

    #log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict used by build steps.

    CCC/CCXX always carry the detected host compilers; when tryccache is
    requested, CC/CXX point at the ccache wrapper scripts, otherwise they
    alias CCC/CCXX directly. 'overrides' is merged in last.

    NOTE(review): the dict literal's opening, the tryccache conditional and
    the final 'return env' are elided in this excerpt; statements below are
    reproduced verbatim.
    """
    'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
    'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
    # ccache path: route compilation through the wrapper scripts
    env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
    env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
    env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
    # non-ccache path: use the raw compilers
    env['CC'] = env['CCC']
    env['CXX'] = env['CCXX']

    if overrides is not None:
        env.update(overrides)
def NetLockDl(props, extralock=None):
    """Render the list of download lock accesses for a build (may be empty).

    NOTE(review): the lock-list initialisation, the extralock handling and
    the final return are elided in this excerpt; statements below are
    reproduced verbatim.
    """
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
        return [lock.access('exclusive')]
def NetLockUl(props):
    """Render the list of upload lock accesses for a build (may be empty).

    NOTE(review): the lock-list initialisation and the final return are
    elided in this excerpt; statements below are reproduced verbatim.
    """
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
        return [lock.access('exclusive')]
def IsTargetSelected(target):
    """Return a doStepIf predicate that is true when 'target' matches the
    build's selected target (or when "all" targets are selected).

    NOTE(review): the predicate's return statements are elided in this
    excerpt; statements below are reproduced verbatim.
    """
    def CheckTargetProperty(step):
        selected_target = step.getProperty("target", "all")
        if selected_target != "all" and selected_target != target:

    return CheckTargetProperty
def UsignSec2Pub(props):
    """Derive usign public key material from the branch's secret key.

    NOTE(review): guard lines and part of this function are elided in this
    excerpt; statements below are reproduced verbatim. The slicing appears
    to follow the usign secret key layout (keep the algorithm id, key number
    and public portion) — TODO confirm against the usign format.
    """
    branch = props.getProperty("branch")
    comment = branches[branch].get("usign_comment") or "untrusted comment: secret key"
    seckey = branches[branch].get("usign_key")
    seckey = base64.b64decode(seckey)

    # rewrite the comment for the public half and re-encode selected bytes
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
def canStartBuild(builder, wfb, request):
    """ filter out non tag requests for tag_only workers. """
    # NOTE(review): the return statements are elided in this excerpt; a
    # tag_only worker presumably rejects untagged requests here — confirm
    # against the full file.
    wtagonly = wfb.worker.properties.getProperty('tag_only')
    tag = request.properties.getProperty('tag')

    if wtagonly and not tag:
# NOTE(review): the initialisation of 'workerNames' and the closing bracket
# of the Trigger step are elided in this excerpt; statements below are
# reproduced verbatim.

# flat list of all configured worker names
for worker in c['workers']:
    workerNames.append(worker.workername)

# add a single LocalWorker to handle the forcebuild builder
c['workers'].append(LocalWorker("__local_force_build", max_builds=1))

# The force builder does no work itself: it only triggers the per-target
# builders via the "trigger" Triggerable scheduler.
force_factory = BuildFactory()
force_factory.addStep(steps.Trigger(
    name = "trigger_build",
    schedulerNames = [ "trigger" ],
    sourceStamps = [{ "codebase": "", "branch": Property("force_branch"), "revision": Property("force_revision"), "repository": repo_url, "project": "" }],
    set_properties = { "reason": Property("reason"), "tag": Property("tag"), "target": Property("target") },

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workername = "__local_force_build",
    factory = force_factory))
648 # NB the phase1 build factory assumes workers are single-build only
649 for target in targets:
650 ts = target.split('/')
652 factory = BuildFactory()
654 # setup shared work directory if required
655 factory.addStep(ShellCommand(
657 descriptionDone = "Shared work directory set up",
658 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
660 haltOnFailure = True,
663 # find number of cores
664 factory.addStep(SetPropertyFromCommand(
667 description = "Finding number of CPUs",
671 # find gcc and g++ compilers
672 factory.addStep(FileDownload(
673 name = "dlfindbinpl",
674 mastersrc = scripts_dir + '/findbin.pl',
675 workerdest = "../findbin.pl",
679 factory.addStep(SetPropertyFromCommand(
681 property = "cc_command",
682 description = "Finding gcc command",
683 command = ["../findbin.pl", "gcc", "", ""],
684 haltOnFailure = True,
687 factory.addStep(SetPropertyFromCommand(
689 property = "cxx_command",
690 description = "Finding g++ command",
691 command = ["../findbin.pl", "g++", "", ""],
692 haltOnFailure = True,
695 # see if ccache is available
696 factory.addStep(SetPropertyFromCommand(
698 property = "ccache_command",
699 description = "Testing for ccache command",
700 command = ["which", "ccache"],
701 haltOnFailure = False,
702 flunkOnFailure = False,
703 warnOnFailure = False,
704 hideStepIf = lambda r, s: r==results.FAILURE,
707 # check out the source
709 # if repo doesn't exist: 'git clone repourl'
710 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -f -f -d -x'. Only works with mode='full'
711 # git cat-file -e <commit>
712 # git checkout -f <commit>
713 # git checkout -B <branch>
721 haltOnFailure = True,
725 factory.addStep(ShellCommand(
727 description = "Fetching Git remote refs",
728 descriptionDone = "Git remote refs fetched",
729 command = ["git", "fetch", "origin", Interpolate("+refs/heads/%(prop:branch)s:refs/remotes/origin/%(prop:branch)s")],
730 haltOnFailure = True,
733 # Verify that Git HEAD points to a tag or branch
734 # Ref: https://web.archive.org/web/20190729224316/http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
735 factory.addStep(ShellCommand(
737 description = "Ensuring that Git HEAD is pointing to a branch or tag",
738 descriptionDone = "Git HEAD is sane",
739 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
740 haltOnFailure = True,
743 factory.addStep(ShellCommand(
745 description = "Remove tmp folder",
746 command=["rm", "-rf", "tmp/"],
750 factory.addStep(ShellCommand(
751 name = "rmfeedlinks",
752 description = "Remove feed symlinks",
753 command=["rm", "-rf", "package/feeds/"],
756 factory.addStep(StringDownload(
758 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
759 workerdest = "../ccache_cc.sh",
763 factory.addStep(StringDownload(
765 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
766 workerdest = "../ccache_cxx.sh",
771 factory.addStep(ShellCommand(
772 name = "updatefeeds",
773 description = "Updating feeds",
774 command=["./scripts/feeds", "update"],
775 env = MakeEnv(tryccache=True),
776 haltOnFailure = True,
781 factory.addStep(ShellCommand(
782 name = "installfeeds",
783 description = "Installing feeds",
784 command=["./scripts/feeds", "install", "-a"],
785 env = MakeEnv(tryccache=True),
786 haltOnFailure = True,
790 factory.addStep(StringDownload(
791 name = "dlconfigseed",
792 s = Interpolate("%(kw:seed)s\n", seed=GetConfigSeed),
793 workerdest = ".config",
798 factory.addStep(ShellCommand(
800 descriptionDone = ".config seeded",
801 command = Interpolate("printf 'CONFIG_TARGET_%(kw:target)s=y\\nCONFIG_TARGET_%(kw:target)s_%(kw:subtarget)s=y\\nCONFIG_SIGNED_PACKAGES=%(kw:usign:#?|y|n)s\\n' >> .config", target=ts[0], subtarget=ts[1], usign=GetUsignKey),
804 factory.addStep(ShellCommand(
806 description = "Removing output directory",
807 command = ["rm", "-rf", "bin/"],
810 factory.addStep(ShellCommand(
812 description = "Populating .config",
813 command = ["make", "defconfig"],
817 # check arch - exit early if does not exist - NB: some targets do not define CONFIG_TARGET_target_subtarget
818 factory.addStep(ShellCommand(
820 description = "Checking architecture",
821 descriptionDone = "Architecture validated",
822 command = 'grep -sq CONFIG_TARGET_%s=y .config && grep -sq CONFIG_TARGET_SUBTARGET=\\"%s\\" .config' %(ts[0], ts[1]),
826 haltOnFailure = True,
827 flunkOnFailure = False, # this is not a build FAILURE - TODO mark build as SKIPPED
831 factory.addStep(SetPropertyFromCommand(
834 description = "Finding libc suffix",
835 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"],
839 factory.addStep(StringDownload(
840 name = "dlkeybuildpub",
841 s = Interpolate("%(kw:sec2pub)s", sec2pub=UsignSec2Pub),
842 workerdest = "key-build.pub",
844 doStepIf = IsUsignEnabled,
847 factory.addStep(StringDownload(
849 s = "# fake private key",
850 workerdest = "key-build",
852 doStepIf = IsUsignEnabled,
855 factory.addStep(StringDownload(
856 name = "dlkeybuilducert",
857 s = "# fake certificate",
858 workerdest = "key-build.ucert",
860 doStepIf = IsUsignEnabled,
864 factory.addStep(ShellCommand(
866 description = "Preparing dl/",
867 descriptionDone = "dl/ prepared",
868 command = 'mkdir -p ../dl && rm -rf "build/dl" && ln -s ../../dl "build/dl"',
869 workdir = Property("builddir"),
875 factory.addStep(ShellCommand(
877 description = "Pruning dl/",
878 descriptionDone = "dl/ pruned",
879 command = 'find dl/ -atime +15 -delete -print',
884 factory.addStep(ShellCommand(
886 description = "Building and installing GNU tar",
887 descriptionDone = "GNU tar built and installed",
888 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
889 env = MakeEnv(tryccache=True),
890 haltOnFailure = True,
894 factory.addStep(ShellCommand(
896 description = "Populating dl/",
897 descriptionDone = "dl/ populated",
898 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
904 factory.addStep(ShellCommand(
906 description = "Cleaning base-files",
907 command=["make", "package/base-files/clean", "V=s"],
911 factory.addStep(ShellCommand(
913 description = "Building and installing tools",
914 descriptionDone = "Tools built and installed",
915 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
916 env = MakeEnv(tryccache=True),
917 haltOnFailure = True,
920 factory.addStep(ShellCommand(
922 description = "Building and installing toolchain",
923 descriptionDone = "Toolchain built and installed",
924 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
926 haltOnFailure = True,
929 factory.addStep(ShellCommand(
931 description = "Building kmods",
932 descriptionDone = "Kmods built",
933 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
935 haltOnFailure = True,
938 # find kernel version
939 factory.addStep(SetPropertyFromCommand(
940 name = "kernelversion",
941 property = "kernelversion",
942 description = "Finding the effective Kernel version",
943 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
944 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") },
947 factory.addStep(ShellCommand(
949 description = "Cleaning up package build",
950 descriptionDone = "Package build cleaned up",
951 command=["make", "package/cleanup", "V=s"],
954 factory.addStep(ShellCommand(
956 description = "Building packages",
957 descriptionDone = "Packages built",
958 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
960 haltOnFailure = True,
963 factory.addStep(ShellCommand(
965 description = "Installing packages",
966 descriptionDone = "Packages installed",
967 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
969 haltOnFailure = True,
972 factory.addStep(ShellCommand(
974 description = "Indexing packages",
975 descriptionDone = "Packages indexed",
976 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
978 haltOnFailure = True,
981 factory.addStep(ShellCommand(
983 description = "Building and installing images",
984 descriptionDone = "Images built and installed",
985 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
987 haltOnFailure = True,
990 factory.addStep(ShellCommand(
992 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
993 command = "make -j1 buildinfo V=s || true",
995 haltOnFailure = True,
998 factory.addStep(ShellCommand(
999 name = "json_overview_image_info",
1000 description = "Generating profiles.json in target folder",
1001 command = "make -j1 json_overview_image_info V=s || true",
1003 haltOnFailure = True,
1006 factory.addStep(ShellCommand(
1008 description = "Calculating checksums",
1009 descriptionDone = "Checksums calculated",
1010 command=["make", "-j1", "checksum", "V=s"],
1012 haltOnFailure = True,
1015 factory.addStep(ShellCommand(
1017 descriptionDone = "Kmod directory created",
1018 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
1019 haltOnFailure = True,
1020 doStepIf = IsKmodArchiveEnabled,
1023 factory.addStep(ShellCommand(
1024 name = "kmodprepare",
1025 description = "Preparing kmod archive",
1026 descriptionDone = "Kmod archive prepared",
1027 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
1028 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
1029 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1030 haltOnFailure = True,
1031 doStepIf = IsKmodArchiveEnabled,
1034 factory.addStep(ShellCommand(
1036 description = "Indexing kmod archive",
1037 descriptionDone = "Kmod archive indexed",
1038 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
1039 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
1041 haltOnFailure = True,
1042 doStepIf = IsKmodArchiveEnabled,
1046 factory.addStep(MasterShellCommand(
1047 name = "signprepare",
1048 descriptionDone = "Temporary signing directory prepared",
1049 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
1050 haltOnFailure = True,
1051 doStepIf = IsSignEnabled,
1055 factory.addStep(ShellCommand(
1057 description = "Packing files to sign",
1058 descriptionDone = "Files to sign packed",
1059 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
1060 haltOnFailure = True,
1061 doStepIf = IsSignEnabled,
1064 factory.addStep(FileUpload(
1065 workersrc = "sign.tar.gz",
1066 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1067 haltOnFailure = True,
1068 doStepIf = IsSignEnabled,
1071 factory.addStep(MasterShellCommand(
1073 description = "Signing files",
1074 descriptionDone = "Files signed",
1075 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]), Interpolate("%(prop:branch)s")],
1076 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
1077 haltOnFailure = True,
1078 doStepIf = IsSignEnabled,
1081 factory.addStep(FileDownload(
1082 name = "dlsigntargz",
1083 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
1084 workerdest = "sign.tar.gz",
1085 haltOnFailure = True,
1086 doStepIf = IsSignEnabled,
1089 factory.addStep(ShellCommand(
1090 name = "signunpack",
1091 description = "Unpacking signed files",
1092 descriptionDone = "Signed files unpacked",
1093 command = ["tar", "-xzf", "sign.tar.gz"],
1094 haltOnFailure = True,
1095 doStepIf = IsSignEnabled,
1099 factory.addStep(ShellCommand(
1100 name = "dirprepare",
1101 descriptionDone = "Upload directory structure prepared",
1102 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1103 haltOnFailure = True,
1106 factory.addStep(ShellCommand(
1107 name = "linkprepare",
1108 descriptionDone = "Repository symlink prepared",
1109 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=util.Transform(GetBaseVersion, Property("branch"))), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1110 doStepIf = IsNoMasterBuild,
1111 haltOnFailure = True,
1114 factory.addStep(ShellCommand(
1115 name = "kmoddirprepare",
1116 descriptionDone = "Kmod archive upload directory prepared",
1117 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1118 haltOnFailure = True,
1119 doStepIf = IsKmodArchiveEnabled,
1122 factory.addStep(ShellCommand(
1124 description = "Uploading directory structure",
1125 descriptionDone = "Directory structure uploaded",
1126 command = ["rsync", "-az"] + rsync_defopts + [Interpolate("%(prop:rsync_ipv4:+-4)s"), "tmp/upload/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("bin", "url"))],
1127 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1128 haltOnFailure = True,
1131 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1134 # download remote sha256sums to 'target-sha256sums'
1135 factory.addStep(ShellCommand(
1136 name = "target-sha256sums",
1137 description = "Fetching remote sha256sums for target",
1138 descriptionDone = "Remote sha256sums for target fetched",
1139 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(prop:rsync_ipv4:+-4)s"), Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1140 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1142 haltOnFailure = False,
1143 flunkOnFailure = False,
1144 warnOnFailure = False,
1145 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1148 # build list of files to upload
# Ship the master-side sha2rsync.pl helper to the worker (one directory
# above the build dir, hence the "../" destination).
1149 factory.addStep(FileDownload(
1150 name = "dlsha2rsyncpl",
1151 mastersrc = scripts_dir + '/sha2rsync.pl',
1152 workerdest = "../sha2rsync.pl",
# Compare the fetched remote sha256sums against the locally generated ones
# and write the names of changed/new files to 'rsynclist'.
1156 factory.addStep(ShellCommand(
1158 description = "Building list of files to upload",
1159 descriptionDone = "List of files to upload built",
1160 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1161 haltOnFailure = True,
# Ship the master-side rsync.sh wrapper to the worker; the upload/prune
# steps below invoke it instead of calling rsync directly.
1164 factory.addStep(FileDownload(
1165 name = "dlrsync.sh",
1166 mastersrc = scripts_dir + '/rsync.sh',
1167 workerdest = "../rsync.sh",
1171 # upload new files and update existing ones
# Upload only the files listed in 'rsynclist' (the delta computed above),
# excluding the kmods/ tree which is handled by the dedicated kmodupload
# step. --delay-updates plus a per-target --partial-dir keeps the remote
# tree consistent while the transfer is in flight / resumable.
1172 factory.addStep(ShellCommand(
1173 name = "targetupload",
1174 description = "Uploading target files",
1175 descriptionDone = "Target files uploaded",
1176 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1177 ["-a", Interpolate("%(prop:rsync_ipv4:+-4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1178 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1179 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1180 haltOnFailure = True,
1182 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
1185 # delete files which don't exist locally
# Prune pass: the "--existing --ignore-existing" pair makes rsync transfer
# nothing at all, so combined with --delete the only effect is removing
# remote files that no longer exist locally (standard rsync prune idiom).
1186 factory.addStep(ShellCommand(
1187 name = "targetprune",
1188 description = "Pruning target files",
1189 descriptionDone = "Target files pruned",
1190 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1191 ["-a", Interpolate("%(prop:rsync_ipv4:+-4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1192 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1193 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1194 haltOnFailure = True,
1197 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("bin", "url")),
# Mirror the per-kernel-version kmods directory to the remote kmods archive
# (--delete keeps it an exact mirror). Runs only when BOTH the kmod archive
# feature is enabled AND a bin rsync URL is configured — hence the two-arg
# Transform lambda in doStepIf.
1200 factory.addStep(ShellCommand(
1201 name = "kmodupload",
1202 description = "Uploading kmod archive",
1203 descriptionDone = "Kmod archive uploaded",
1204 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1205 ["-a", Interpolate("%(prop:rsync_ipv4:+-4)s"), Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1206 Interpolate("%(kw:url)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", url=GetRsyncParams.withArgs("bin", "url"), target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1207 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("bin", "key")) },
1208 haltOnFailure = True,
1211 doStepIf = util.Transform(lambda a, b: bool(a and b), IsKmodArchiveEnabled, GetRsyncParams.withArgs("bin", "url")),
# Collect the names of downloaded source tarballs (dl/) that are newer than
# .config — i.e. fetched during this build — into 'sourcelist'. Hash files,
# partial downloads (*.dl), dotfiles and empty files are excluded. Note the
# command is a shell string (uses redirection), not an argv list.
1214 factory.addStep(ShellCommand(
1215 name = "sourcelist",
1216 description = "Finding source archives to upload",
1217 descriptionDone = "Source archives to upload found",
1218 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1219 haltOnFailure = True,
# Upload the just-listed source archives to the separate "src" rsync module.
# --size-only suffices here because source tarballs are immutable once named;
# the --partial-dir embeds the worker name so concurrent builders don't
# clobber each other's partial transfers.
1222 factory.addStep(ShellCommand(
1223 name = "sourceupload",
1224 description = "Uploading source archives",
1225 descriptionDone = "Source archives uploaded",
1226 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts + [Interpolate("%(prop:rsync_ipv4:+-4)s"),
1227 Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", Interpolate("%(kw:url)s/", url=GetRsyncParams.withArgs("src", "url"))],
1228 env={ 'RSYNC_PASSWORD': Interpolate("%(kw:key)s", key=GetRsyncParams.withArgs("src", "key")) },
1229 haltOnFailure = True,
1232 doStepIf = util.Transform(bool, GetRsyncParams.withArgs("src", "url")),
# Informational disk-usage report for the build dir; never affects the
# build result (halt/flunk/warn all False). LC_ALL=C keeps the output
# format stable regardless of the worker's locale.
1235 factory.addStep(ShellCommand(
1237 description = "Reporting disk usage",
1238 command=["df", "-h", "."],
1239 env={'LC_ALL': 'C'},
1241 haltOnFailure = False,
1242 flunkOnFailure = False,
1243 warnOnFailure = False,
# Informational total-size report of the build dir; purely diagnostic,
# failures are ignored just like the df step above.
1247 factory.addStep(ShellCommand(
1249 description = "Reporting estimated file space usage",
1250 command=["du", "-sh", "."],
1251 env={'LC_ALL': 'C'},
1253 haltOnFailure = False,
1254 flunkOnFailure = False,
1255 warnOnFailure = False,
# Dump ccache statistics at the end of the build; only runs when the
# ccache_command property was detected earlier (doStepIf), and never
# influences the build result.
1259 factory.addStep(ShellCommand(
1260 name = "ccachestat",
1261 description = "Reporting ccache stats",
1262 command=["ccache", "-s"],
1264 want_stderr = False,
1265 haltOnFailure = False,
1266 flunkOnFailure = False,
1267 warnOnFailure = False,
1268 doStepIf = util.Transform(bool, Property("ccache_command")),
# Register one builder per tracked branch for this target; all branches share
# the same factory, and the branch name doubles as the builder tag so the
# web UI can filter by branch.
1271 for brname in branchNames:
1272 bldrname = brname + "_" + target
1273 c['builders'].append(BuilderConfig(name=bldrname, workernames=workerNames, factory=factory, tags=[brname,], nextBuild=GetNextBuild, canStartBuild=canStartBuild))
1276 ####### STATUS TARGETS
1278 # Build results are delivered by reporters registered in c['services'] and by
1279 # the web UI configured via c['www'] (the pre-0.9 'status' target API this
1280 # comment once described no longer exists). Reporters include email senders
# and IRC bots; the www plugin provides the waterfall and console views.
# Enable the web UI only when a bind address/port is configured in the
# [phase1] ini section; the www dict literal's opening brace falls outside
# this view.
1282 if "status_bind" in inip1:
1284 'port': inip1.get("status_bind"),
1286 'waterfall_view': True,
1287 'console_view': True,
# Optional single-user HTTP auth: that user becomes the sole member of the
# "admins" role, which is the only role allowed to hit control endpoints.
1292 if "status_user" in inip1 and "status_password" in inip1:
1293 c['www']['auth'] = util.UserPasswordAuth([
1294 (inip1.get("status_user"), inip1.get("status_password"))
1296 c['www']['authz'] = util.Authz(
1297 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1298 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
# Optional IRC reporter, driven by an [irc] ini section. host, nickname and
# channel are all required before the reporter is created; port defaults to
# 6667 and the password is optional.
1302 if ini.has_section("irc"):
1304 irc_host = iniirc.get("host", None)
1305 irc_port = iniirc.getint("port", 6667)
1306 irc_chan = iniirc.get("channel", None)
1307 irc_nick = iniirc.get("nickname", None)
1308 irc_pass = iniirc.get("password", None)
1310 if irc_host and irc_nick and irc_chan:
1311 irc = reporters.IRC(irc_host, irc_nick,
1313 password = irc_pass,
1314 channels = [ irc_chan ],
# Only announce exceptions, breakages and recoveries — not every build.
1315 notify_events = [ 'exception', 'problem', 'recovery' ]
1318 c['services'].append(irc)
# Map commit hashes of the OpenWrt git repos to their gitweb commit URLs so
# the web UI can link revisions (\1 carries the repo name captured above).
1320 c['revlink'] = util.RevlinkMatch([
1321 r'https://git.openwrt.org/openwrt/(.*).git'
1323 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1328 # This specifies what database buildbot uses to store its state. You can leave
1329 # this at its default for all but the largest installations.
# SQLite file in the master's base directory; the enclosing dict literal's
# braces fall outside this view.
1330 'db_url' : "sqlite:///state.sqlite",
# Opt out of sending anonymous usage statistics to buildbot.net.
1333 c['buildbotNetUsageData'] = None