2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record this buildmaster's PID once at startup so external tooling can
# find the process; never overwrite a pid file left by a running master.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as handle:
        handle.write(str(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
# Load the INI-style master configuration; the path can be overridden via
# the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Refuse to start with an incomplete configuration: all three sections are
# required by the rest of this file.
if any(section not in ini for section in ("general", "phase1", "rsync")):
    raise ValueError("Fix your configuration")
# Paths used by the master: its working directory (git checkout, signing
# tmp dir) and the helper scripts shipped alongside this config.
60 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
61 scripts_dir = os.path.abspath("../scripts")
# Seed contents prepended to every worker's .config (may be empty).
# NOTE(review): `inip1` is presumably ini['phase1'], bound on an elided line — confirm.
63 config_seed = inip1.get("config_seed", "")
# Source repository to build; branch defaults to "master" (snapshot builds).
65 repo_url = ini['repo'].get("url")
66 repo_branch = ini['repo'].get("branch", "master")
# rsync destination for built binaries, plus default transfer options.
68 rsync_bin_url = ini['rsync'].get("binary_url")
69 rsync_bin_key = ini['rsync'].get("binary_password")
70 rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
# "host::module" or "rsync://" URLs use the rsync daemon protocol, which
# additionally supports a connection timeout.
72 if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
73 rsync_bin_defopts += ["--contimeout=20"]
# Same for the source-archive (dl/) mirror.
75 rsync_src_url = ini['rsync'].get("source_url")
76 rsync_src_key = ini['rsync'].get("source_password")
77 rsync_src_defopts = ["-v", "-4", "--timeout=120"]
79 if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
80 rsync_src_defopts += ["--contimeout=20"]
# Default usign comment derived from the branch name, e.g.
# "untrusted comment: Openwrt 21.02 key"; the [usign] section may override it.
83 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
85 if ini.has_section("usign"):
86 usign_key = ini['usign'].get("key")
87 usign_comment = ini['usign'].get("comment", usign_comment)
# Whether to also build and publish a per-kernel kmod archive.
89 enable_kmod_archive = inip1.getboolean("kmod_archive", False)
91 # PB port can be either a numeric port or a connection string
92 pb_port = inip1.get("port") or 9989
94 # This is the dictionary that the buildmaster pays attention to. We also use
95 # a shorter alias to save typing.
96 c = BuildmasterConfig = {}
98 ####### PROJECT IDENTITY
100 # the 'title' string will appear at the top of this buildbot
101 # installation's html.WebStatus home page (linked to the
102 # 'titleURL') and is embedded in the title of the waterfall HTML page.
104 c['title'] = ini['general'].get("title")
105 c['titleURL'] = ini['general'].get("title_url")
107 # the 'buildbotURL' string should point to the location where the buildbot's
108 # internal web server (usually the html.WebStatus page) is visible. This
109 # typically uses the port number set in the Waterfall 'status' entry, but
110 # with an externally-visible host name which the buildbot cannot figure out
113 c['buildbotURL'] = inip1.get("buildbot_url")
117 # The 'workers' list defines the set of recognized buildworkers. Each element is
118 # a Worker object, specifying a unique worker name and password. The same
119 # worker name and password must be configured on the worker.
# Register every "[worker ...]" section that has credentials and belongs to
# phase 1 (no "phase" option, or phase == 1).
# NOTE(review): c['workers'] and NetLocks appear to be initialised on elided
# lines above this loop — confirm.
124 for section in ini.sections():
125 if section.startswith("worker "):
126 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
127 (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
# Per-worker properties: optional named master-side locks used to
# serialise downloads/uploads across workers sharing a link.
128 sl_props = { 'dl_lock':None, 'ul_lock':None }
129 name = ini.get(section, "name")
130 password = ini.get(section, "password")
131 if ini.has_option(section, "dl_lock"):
132 lockname = ini.get(section, "dl_lock")
133 sl_props['dl_lock'] = lockname
# Create each named lock lazily, once, so workers naming the
# same lock share a single MasterLock instance.
134 if lockname not in NetLocks:
135 NetLocks[lockname] = locks.MasterLock(lockname)
136 if ini.has_option(section, "ul_lock"):
137 lockname = ini.get(section, "ul_lock")
138 sl_props['ul_lock'] = lockname
139 if lockname not in NetLocks:
140 NetLocks[lockname] = locks.MasterLock(lockname)
# One build at a time per worker; the lock names travel as properties.
141 c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
# Worker connections arrive over Twisted PB on pb_port (number or
# connection string, see above).
143 c['protocols'] = {'pb': {'port': pb_port}}
# Collapse queued requests for the same builder into one build.
146 c['collapseRequests'] = True
148 # Reduce amount of backlog data
149 c['configurators'] = [util.JanitorConfigurator(
150 logHorizon=timedelta(days=3),
154 @defer.inlineCallbacks
155 def getNewestCompleteTime(bldr):
156 """Returns the complete_at of the latest completed and not SKIPPED
157 build request for this builder, or None if there are no such build
158 requests. We need to filter out SKIPPED requests because we're
159 using collapseRequests=True which is unfortunately marking all
160 previous requests as complete when new buildset is created.
162 @returns: datetime instance or None, via Deferred
# Newest completed, non-skipped build request for this builder.
165 bldrid = yield bldr.getBuilderId()
166 completed = yield bldr.master.data.get(
167 ('builders', bldrid, 'buildrequests'),
169 resultspec.Filter('complete', 'eq', [True]),
170 resultspec.Filter('results', 'ne', [results.SKIPPED]),
172 order=['-complete_at'], limit=1)
# NOTE(review): the "no completed requests -> return None" early exit sits
# on elided lines — confirm before relying on completed[0] here.
176 complete_at = completed[0]['complete_at']
# Cross-check against the most recently *started* build: its complete_at
# can be newer than the collapsed request's timestamp.
178 last_build = yield bldr.master.data.get(
181 resultspec.Filter('builderid', 'eq', [bldrid]),
183 order=['-started_at'], limit=1)
185 if last_build and last_build[0]:
186 last_complete_at = last_build[0]['complete_at']
187 if last_complete_at and (last_complete_at > complete_at):
188 return last_complete_at
192 @defer.inlineCallbacks
193 def prioritizeBuilders(master, builders):
194 """Returns sorted list of builders by their last timestamp of completed and
197 @returns: list of sorted builders
# A builder currently running (or tearing down) a build.
200 def is_building(bldr):
201 return bool(bldr.building) or bool(bldr.old_building)
# NOTE(review): these two lines belong to an elided nested helper
# (presumably `def bldr_info(bldr):`) pairing each builder with its
# newest completion time — confirm.
204 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
205 d.addCallback(lambda complete_at: (complete_at, bldr))
# NOTE(review): elided sort-key helper (`bldr_sort`?) — builders with no
# timestamp or currently building get a fixed datetime so they sort last.
209 (complete_at, bldr) = item
213 complete_at = date.replace(tzinfo=tzutc())
215 if is_building(bldr):
217 complete_at = date.replace(tzinfo=tzutc())
219 return (complete_at, bldr.name)
# Oldest-completed builders first, so stale targets get rebuilt soonest.
221 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
222 results.sort(key=bldr_sort)
225 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
227 return [r[1] for r in results]
# Install the custom prioritisation hook.
229 c['prioritizeBuilders'] = prioritizeBuilders
231 ####### CHANGESOURCES
# Keep a local shallow clone of the source tree; it is used below to
# enumerate buildable targets via dump-target-info.pl.
237 if not os.path.isdir(work_dir+'/source.git'):
238 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
240 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
242 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
243 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
244 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
# Each output line starts with a "target/subtarget" token; collect them.
247 line = findtargets.stdout.readline()
250 ta = line.decode().strip().split(' ')
251 targets.append(ta[0])
254 # the 'change_source' setting tells the buildmaster how it should find out
255 # about source code changes. Here we point to the buildbot clone of pyflakes.
# Poll the configured repo/branch for new commits (a separate work.git
# checkout, so the poller does not fight the clone above).
257 c['change_source'] = []
258 c['change_source'].append(GitPoller(
260 workdir=work_dir+'/work.git', branch=repo_branch,
265 # Configure the Schedulers, which decide how to react to incoming changes. In this
266 # case, just kick off a 'basebuild' build
# Force-scheduler parameter offering the release tags of the current
# branch as a choice list, refreshed from the remote on each render.
268 class TagChoiceParameter(BaseParameter):
269 spec_attributes = ["strict", "choices"]
273 def __init__(self, name, label=None, **kw):
274 super().__init__(name, label, **kw)
# Cached tag list; rebuilt by the (elided) choices property below.
275 self._choice_list = []
# NOTE(review): the following lines appear to be the body of an elided
# `choices` property/method — confirm.
# Branch names look like "openwrt-21.02"; basever captures "21.02".
280 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
283 findtags = subprocess.Popen(
284 ['git', 'ls-remote', '--tags', repo_url],
285 stdout = subprocess.PIPE)
288 line = findtags.stdout.readline()
# Accept only plain release/rc tags, e.g. v21.02.3 or v21.02.0-rc4.
293 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
# Keep tags matching this branch's base version only.
295 if tagver and tagver[1].find(basever[1]) == 0:
296 taglist.append(tagver[1])
# Sort newest first; appending "-z" to non-rc tags makes a final release
# sort before its own rcs. The empty string means "no tag" (HEAD build).
298 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
299 taglist.insert(0, '')
301 self._choice_list = taglist
303 return self._choice_list
305 def parse_from_arg(self, s):
# Reject values outside the advertised choice list when strict.
306 if self.strict and s not in self._choice_list:
307 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
# Automatic scheduler: any change on the polled branch rebuilds all targets
# after a 60s settle time.
311 c['schedulers'].append(SingleBranchScheduler(
313 change_filter = filter.ChangeFilter(branch=repo_branch),
314 treeStableTimer = 60,
315 builderNames = targets))
# Manual "force" scheduler: drives the 00_force_build builder, whose steps
# then trigger the selected per-target builders.
317 c['schedulers'].append(ForceScheduler(
319 buttonName = "Force builds",
320 label = "Force build details",
321 builderNames = [ "00_force_build" ],
# The codebase is fixed: branch/revision/repository/project are not
# selectable from the web form.
324 util.CodebaseParameter(
326 label = "Repository",
327 branch = util.FixedParameter(name = "branch", default = ""),
328 revision = util.FixedParameter(name = "revision", default = ""),
329 repository = util.FixedParameter(name = "repository", default = ""),
330 project = util.FixedParameter(name = "project", default = "")
334 reason = util.StringParameter(
337 default = "Trigger build",
# Build options: which target to build ("all" or a single one).
343 util.NestedParameter(
345 label="Build Options",
348 util.ChoiceStringParameter(
350 label = "Build target",
352 choices = [ "all" ] + targets
366 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
367 # what steps, and which workers can execute them. Note that any particular build will
368 # only take place on one worker.
# doStepIf predicate: true when the force-build form supplied a valid
# release/rc tag (e.g. "21.02.3", "21.02.0-rc4").
370 def IsTaggingRequested(step):
371 val = step.getProperty("tag")
372 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoMasterBuild(step):
    """doStepIf predicate: run the step only on release-branch builds.

    Building from "master" means snapshot builds; those skip the
    release-only steps guarded by this check.
    """
    if repo_branch == "master":
        return False
    return True
# Base version of the branch, e.g. "21.02" from "openwrt-21.02".
# NOTE(review): the else-branch (presumably returning repo_branch or
# "master") is on elided lines — confirm.
380 def GetBaseVersion():
381 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
382 return repo_branch.split('-')[1]
# Upload-path prefix for this build: "<tag>/" for tagged release builds,
# "<basever>-SNAPSHOT/" on release branches, otherwise (master) presumably
# empty — the trailing branch is elided.
# NOTE(review): likely decorated with @properties.renderer on an elided line.
387 def GetVersionPrefix(props):
388 basever = GetBaseVersion()
389 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
390 return "%s/" % props["tag"]
391 elif basever != "master":
392 return "%s-SNAPSHOT/" % basever
# nextBuild hook: prefer requests that carry a "tag" property (forced
# release builds) over ordinary ones; elided lines hold the loop/return.
396 def GetNextBuild(builder, requests):
398 if r.properties and r.properties.hasProperty("tag"):
402 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
# Build the environment for compile steps. CCC/CCXX always carry the real
# compilers; with tryccache=True, CC/CXX point at wrapper scripts that
# prepend ccache, otherwise they alias CCC/CCXX directly.
405 def MakeEnv(overrides=None, tryccache=False):
407 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
408 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
411 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
412 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
413 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
415 env['CC'] = env['CCC']
416 env['CXX'] = env['CCXX']
# Caller-supplied variables win over the defaults above.
418 if overrides is not None:
419 env.update(overrides)
# Renderer: exclusive access to this worker's named download lock, if the
# worker was configured with one (see sl_props above); elided lines
# presumably return [] otherwise.
423 def NetLockDl(props):
425 if props.hasProperty("dl_lock"):
426 lock = NetLocks[props["dl_lock"]]
428 return [lock.access('exclusive')]
# Same for the upload lock.
433 def NetLockUl(props):
435 if props.hasProperty("ul_lock"):
436 lock = NetLocks[props["ul_lock"]]
438 return [lock.access('exclusive')]
# Extract the "tag" value from the force-scheduler's nested "options"
# property (a dict when set via the web form).
443 def TagPropertyValue(props):
444 if props.hasProperty("options"):
445 options = props.getProperty("options")
446 if type(options) is dict:
447 return options.get("tag")
# Returns a doStepIf predicate that is false when the force form selected
# a specific target other than this one ("all" matches every target).
450 def IsTargetSelected(target):
451 def CheckTargetProperty(step):
453 options = step.getProperty("options")
454 if type(options) is dict:
455 selected_target = options.get("target", "all")
456 if selected_target != "all" and selected_target != target:
463 return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file content from a base64 secret key.

    seckey  -- base64-encoded usign secret key (as stored in config.ini)
    comment -- comment line of the secret key; a trailing "secret key" is
               rewritten to "public key" for the generated file

    Returns the two-line public-key file content as a str, or None when
    the input is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # Bytes 0:2 (algorithm tag), 32:40 (key id) and 72: (public part) of
    # the decoded secret blob form the public key — presumably per the
    # usign/signify key layout; confirm against usign sources.
    # FIX: b64encode() returns bytes — .decode() it, otherwise the
    # formatted result embeds "b'...'" and the key file is invalid.
    pubdata = base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode()
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment), pubdata)
# Worker-side lock serialising "make download" steps per worker.
477 dlLock = locks.WorkerLock("worker_dl")
# All phase-1 workers may run any target builder.
481 for worker in c['workers']:
482 workerNames.append(worker.workername)
# The force builder only hosts Trigger steps (added in the target loop
# below) that fan out to the real per-target builders.
484 force_factory = BuildFactory()
486 c['builders'].append(BuilderConfig(
487 name = "00_force_build",
488 workernames = workerNames,
489 factory = force_factory))
# One builder per "target/subtarget"; ts[0] is the target, ts[1] the
# subtarget. The factory below runs the whole pipeline: checkout, feeds,
# config, toolchain, packages, images, signing, upload.
491 for target in targets:
492 ts = target.split('/')
494 factory = BuildFactory()
496 # setup shared work directory if required
497 factory.addStep(ShellCommand(
499 description = "Setting up shared work directory",
500 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
502 haltOnFailure = True))
504 # find number of cores
# Stored as a property; later steps use it for make -j.
505 factory.addStep(SetPropertyFromCommand(
508 description = "Finding number of CPUs",
509 command = ["nproc"]))
511 # find gcc and g++ compilers
512 factory.addStep(FileDownload(
513 name = "dlfindbinpl",
514 mastersrc = scripts_dir + '/findbin.pl',
515 workerdest = "../findbin.pl",
# findbin.pl picks a usable compiler; the result feeds MakeEnv's
# cc_command/cxx_command properties.
518 factory.addStep(SetPropertyFromCommand(
520 property = "cc_command",
521 description = "Finding gcc command",
523 "../findbin.pl", "gcc", "", "",
525 haltOnFailure = True))
527 factory.addStep(SetPropertyFromCommand(
529 property = "cxx_command",
530 description = "Finding g++ command",
532 "../findbin.pl", "g++", "", "",
534 haltOnFailure = True))
536 # see if ccache is available
# Best-effort: a missing ccache must not fail the build.
537 factory.addStep(SetPropertyFromCommand(
538 property = "ccache_command",
539 command = ["which", "ccache"],
540 description = "Testing for ccache command",
541 haltOnFailure = False,
542 flunkOnFailure = False,
543 warnOnFailure = False,
546 # Workaround bug when switching from a checked out tag back to a branch
547 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
548 factory.addStep(ShellCommand(
549 name = "gitcheckout",
550 description = "Ensure that Git HEAD is sane",
551 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
552 haltOnFailure = True))
554 # check out the source
556 # if repo doesn't exist: 'git clone repourl'
557 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
558 # 'git fetch -t repourl branch; git reset --hard revision'
# NOTE(review): the Git(...) step's opening line is elided here.
562 branch = repo_branch,
566 haltOnFailure = True,
# Make sure the branch ref exists locally before a tag checkout may
# detach HEAD below.
570 factory.addStep(ShellCommand(
572 description = "Fetching Git remote refs",
573 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
# Only runs when the force form requested a release tag.
578 factory.addStep(ShellCommand(
580 description = "Checking out Git tag",
581 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
582 haltOnFailure = True,
583 doStepIf = IsTaggingRequested
586 # Verify that Git HEAD points to a tag or branch
587 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
588 factory.addStep(ShellCommand(
590 description = "Ensure that Git HEAD is pointing to a branch or tag",
591 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
592 haltOnFailure = True))
# Start from a clean tree: drop tmp/ and stale feed symlinks.
594 factory.addStep(ShellCommand(
596 description = "Remove tmp folder",
597 command=["rm", "-rf", "tmp/"]))
600 factory.addStep(ShellCommand(
601 name = "rmfeedlinks",
602 description = "Remove feed symlinks",
603 command=["rm", "-rf", "package/feeds/"]))
# Wrapper scripts that MakeEnv(tryccache=True) points CC/CXX at: they
# prepend $CCACHE to the real compiler.
605 factory.addStep(StringDownload(
607 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
608 workerdest = "../ccache_cc.sh",
612 factory.addStep(StringDownload(
614 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
615 workerdest = "../ccache_cxx.sh",
620 factory.addStep(ShellCommand(
621 name = "updatefeeds",
622 description = "Updating feeds",
623 command=["./scripts/feeds", "update"],
624 env = MakeEnv(tryccache=True),
625 haltOnFailure = True,
630 factory.addStep(ShellCommand(
631 name = "installfeeds",
632 description = "Installing feeds",
633 command=["./scripts/feeds", "install", "-a"],
634 env = MakeEnv(tryccache=True),
# Seed .config from the configured snippet, then force-select this
# loop iteration's target/subtarget and signing option.
639 if config_seed is not None:
640 factory.addStep(StringDownload(
641 name = "dlconfigseed",
642 s = config_seed + '\n',
643 workerdest = ".config",
648 factory.addStep(ShellCommand(
650 description = "Seeding .config",
651 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
654 factory.addStep(ShellCommand(
656 description = "Removing output directory",
657 command = ["rm", "-rf", "bin/"]
660 factory.addStep(ShellCommand(
662 description = "Populating .config",
663 command = ["make", "defconfig"],
# Guard against defconfig silently dropping the requested target.
668 factory.addStep(ShellCommand(
670 description = "Checking architecture",
671 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
# libc property is "" for musl, "-<libc>" otherwise; used in bin paths.
679 factory.addStep(SetPropertyFromCommand(
682 description = "Finding libc suffix",
683 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
# Install the signing material: real public key, but placeholder
# private key/cert — actual signing happens on the master (signall.sh).
686 if usign_key is not None:
687 factory.addStep(StringDownload(
688 name = "dlkeybuildpub",
689 s = UsignSec2Pub(usign_key, usign_comment),
690 workerdest = "key-build.pub",
694 factory.addStep(StringDownload(
696 s = "# fake private key",
697 workerdest = "key-build",
701 factory.addStep(StringDownload(
702 name = "dlkeybuilducert",
703 s = "# fake certificate",
704 workerdest = "key-build.ucert",
# Share one dl/ cache across builds on this worker.
709 factory.addStep(ShellCommand(
711 description = "Preparing dl/",
712 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
718 factory.addStep(ShellCommand(
720 description = "Building and installing GNU tar",
721 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/tar/compile", "V=s"],
722 env = MakeEnv(tryccache=True),
# Downloads are serialised via dlLock per worker and the optional
# per-worker network lock (NetLockDl).
727 factory.addStep(ShellCommand(
729 description = "Populating dl/",
730 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "download", "V=s"],
733 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
736 factory.addStep(ShellCommand(
738 description = "Cleaning base-files",
739 command=["make", "package/base-files/clean", "V=s"]
743 factory.addStep(ShellCommand(
745 description = "Building and installing tools",
746 command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "tools/install", "V=s"],
747 env = MakeEnv(tryccache=True),
751 factory.addStep(ShellCommand(
753 description = "Building and installing toolchain",
754 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "toolchain/install", "V=s"],
759 factory.addStep(ShellCommand(
761 description = "Building kmods",
762 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
767 # find kernel version
768 factory.addStep(SetPropertyFromCommand(
769 name = "kernelversion",
770 property = "kernelversion",
771 description = "Finding the effective Kernel version",
772 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
773 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
776 factory.addStep(ShellCommand(
778 description = "Cleaning up package build",
779 command=["make", "package/cleanup", "V=s"]
782 factory.addStep(ShellCommand(
784 description = "Building packages",
785 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
790 factory.addStep(ShellCommand(
792 description = "Installing packages",
793 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/install", "V=s"],
# CONFIG_SIGNED_PACKAGES= disables worker-side signing; indexes are
# re-signed on the master after upload.
798 factory.addStep(ShellCommand(
800 description = "Indexing packages",
801 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
806 factory.addStep(ShellCommand(
808 description = "Building and installing images",
809 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "target/install", "V=s"],
814 factory.addStep(ShellCommand(
816 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
817 command = "make -j1 buildinfo V=s || true",
822 factory.addStep(ShellCommand(
823 name = "json_overview_image_info",
824 description = "Generate profiles.json in target folder",
825 command = "make -j1 json_overview_image_info V=s || true",
830 factory.addStep(ShellCommand(
832 description = "Calculating checksums",
833 command=["make", "-j1", "checksum", "V=s"],
# Optionally collect all kmod-*.ipk into a per-kernel-version archive
# directory and index it separately.
838 if enable_kmod_archive:
839 factory.addStep(ShellCommand(
841 description = "Creating kmod directory",
842 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
846 factory.addStep(ShellCommand(
847 name = "kmodprepare",
848 description = "Preparing kmod archive",
849 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
850 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
851 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
855 factory.addStep(ShellCommand(
857 description = "Indexing kmod archive",
858 command=["make", Interpolate("-j%(prop:nproc:-1)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
859 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
# Sign sha256sums/Packages on the master: pack them, upload to the
# master, run signall.sh there, download the signed tarball back.
865 if ini.has_option("gpg", "key") or usign_key is not None:
866 factory.addStep(MasterShellCommand(
867 name = "signprepare",
868 description = "Preparing temporary signing directory",
869 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
873 factory.addStep(ShellCommand(
875 description = "Packing files to sign",
876 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
880 factory.addStep(FileUpload(
881 workersrc = "sign.tar.gz",
882 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
886 factory.addStep(MasterShellCommand(
888 description = "Signing files",
889 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
890 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
894 factory.addStep(FileDownload(
895 name = "dlsigntargz",
896 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
897 workerdest = "sign.tar.gz",
901 factory.addStep(ShellCommand(
903 description = "Unpacking signed files",
904 command = ["tar", "-xzf", "sign.tar.gz"],
# Build the remote directory layout under tmp/upload and push it first,
# so rsync of the actual files finds its destination paths.
909 factory.addStep(ShellCommand(
911 description = "Preparing upload directory structure",
912 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
916 factory.addStep(ShellCommand(
917 name = "linkprepare",
918 description = "Preparing repository symlink",
919 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
920 doStepIf = IsNoMasterBuild,
924 if enable_kmod_archive:
925 factory.addStep(ShellCommand(
926 name = "kmoddirprepare",
927 description = "Preparing kmod archive upload directory",
928 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
932 factory.addStep(ShellCommand(
934 description = "Uploading directory structure",
935 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
936 env={'RSYNC_PASSWORD': rsync_bin_key},
937 haltOnFailure = True,
942 # download remote sha256sums to 'target-sha256sums'
# Best-effort: a missing remote sha256sums just means everything uploads.
943 factory.addStep(ShellCommand(
944 name = "target-sha256sums",
945 description = "Fetching remote sha256sums for target",
946 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
947 env={'RSYNC_PASSWORD': rsync_bin_key},
949 haltOnFailure = False,
950 flunkOnFailure = False,
951 warnOnFailure = False,
954 # build list of files to upload
# sha2rsync.pl diffs remote vs local sha256sums to upload only changes.
955 factory.addStep(FileDownload(
956 name = "dlsha2rsyncpl",
957 mastersrc = scripts_dir + '/sha2rsync.pl',
958 workerdest = "../sha2rsync.pl",
962 factory.addStep(ShellCommand(
964 description = "Building list of files to upload",
965 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
966 haltOnFailure = True,
969 factory.addStep(FileDownload(
971 mastersrc = scripts_dir + '/rsync.sh',
972 workerdest = "../rsync.sh",
976 # upload new files and update existing ones
977 factory.addStep(ShellCommand(
978 name = "targetupload",
979 description = "Uploading target files",
980 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
981 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
982 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
983 env={'RSYNC_PASSWORD': rsync_bin_key},
984 haltOnFailure = True,
988 # delete files which don't exist locally
989 factory.addStep(ShellCommand(
990 name = "targetprune",
991 description = "Pruning target files",
992 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
993 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
994 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
995 env={'RSYNC_PASSWORD': rsync_bin_key},
996 haltOnFailure = True,
1001 if enable_kmod_archive:
1002 factory.addStep(ShellCommand(
1003 name = "kmodupload",
1004 description = "Uploading kmod archive",
1005 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1006 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1007 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1008 env={'RSYNC_PASSWORD': rsync_bin_key},
1009 haltOnFailure = True,
# Upload source tarballs touched by this build to the source mirror.
1014 if rsync_src_url is not None:
1015 factory.addStep(ShellCommand(
1016 name = "sourcelist",
1017 description = "Finding source archives to upload",
1018 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1019 haltOnFailure = True
1022 factory.addStep(ShellCommand(
1023 name = "sourceupload",
1024 description = "Uploading source archives",
1025 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1026 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1027 env={'RSYNC_PASSWORD': rsync_src_key},
1028 haltOnFailure = True,
# Informational tail steps: disk usage and ccache stats; never fail.
1033 factory.addStep(ShellCommand(
1035 description = "Reporting disk usage",
1036 command=["df", "-h", "."],
1037 env={'LC_ALL': 'C'},
1038 haltOnFailure = False,
1039 flunkOnFailure = False,
1040 warnOnFailure = False,
1044 factory.addStep(ShellCommand(
1046 description = "Reporting estimated file space usage",
1047 command=["du", "-sh", "."],
1048 env={'LC_ALL': 'C'},
1049 haltOnFailure = False,
1050 flunkOnFailure = False,
1051 warnOnFailure = False,
1055 factory.addStep(ShellCommand(
1056 name = "ccachestat",
1057 description = "Reporting ccache stats",
1058 command=["ccache", "-s"],
1059 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1060 want_stderr = False,
1061 haltOnFailure = False,
1062 flunkOnFailure = False,
1063 warnOnFailure = False,
# Register the builder, a triggerable scheduler for it, and the matching
# Trigger step on the force builder (guarded by the target selection).
1067 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1069 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1070 force_factory.addStep(steps.Trigger(
1071 name = "trigger_%s" % target,
1072 description = "Triggering %s build" % target,
1073 schedulerNames = [ "trigger_%s" % target ],
1074 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1075 doStepIf = IsTargetSelected(target)
1079 ####### STATUS TARGETS
1081 # 'status' is a list of Status Targets. The results of each build will be
1082 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1083 # including web pages, email senders, and IRC bots.
# Web UI: enabled only when a bind address/port is configured.
1085 if "status_bind" in inip1:
1087 'port': inip1.get("status_bind"),
1089 'waterfall_view': True,
1090 'console_view': True,
# Optional single-user auth; that user gets the "admins" role, which is
# required for any control endpoint (forcing builds etc.).
1095 if "status_user" in inip1 and "status_password" in inip1:
1096 c['www']['auth'] = util.UserPasswordAuth([
1097 (inip1.get("status_user"), inip1.get("status_password"))
1099 c['www']['authz'] = util.Authz(
1100 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1101 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
# Optional IRC notifier; requires at least host, nickname and channel.
1105 if ini.has_section("irc"):
1107 irc_host = iniirc.get("host", None)
1108 irc_port = iniirc.getint("port", 6667)
1109 irc_chan = iniirc.get("channel", None)
1110 irc_nick = iniirc.get("nickname", None)
1111 irc_pass = iniirc.get("password", None)
1113 if irc_host and irc_nick and irc_chan:
1114 irc = reporters.IRC(irc_host, irc_nick,
1116 password = irc_pass,
1117 channels = [ irc_chan ],
1118 notify_events = [ 'exception', 'problem', 'recovery' ]
1121 c['services'].append(irc)
# Map commit links for the OpenWrt cgit instance.
1123 c['revlink'] = util.RevlinkMatch([
1124 r'https://git.openwrt.org/openwrt/(.*).git'
1126 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1131 # This specifies what database buildbot uses to store its state. You can leave
1132 # this at its default for all but the largest installations.
1133 'db_url' : "sqlite:///state.sqlite",
# Opt out of anonymous usage reporting.
1136 c['buildbotNetUsageData'] = None