2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand, SetProperty
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record this process's PID in twistd.pid for external tooling,
# but never clobber a pidfile left by an already-running daemon.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
48 # This is a sample buildmaster config file. It must be installed as
49 # 'master.cfg' in your buildmaster's base directory.
# Load the buildmaster configuration; the path may be overridden through
# the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# These sections are mandatory for a phase-1 master; refuse to start without them.
if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
    raise ValueError("Fix your configuration")
59 # This is the dictionary that the buildmaster pays attention to. We also use
60 # a shorter alias to save typing.
61 c = BuildmasterConfig = {}
63 ####### PROJECT IDENTITY
65 # the 'title' string will appear at the top of this buildbot
66 # installation's html.WebStatus home page (linked to the
67 # 'titleURL') and is embedded in the title of the waterfall HTML page.
69 c['title'] = ini['general'].get("title")
70 c['titleURL'] = ini['general'].get("title_url")
72 # the 'buildbotURL' string should point to the location where the buildbot's
73 # internal web server (usually the html.WebStatus page) is visible. This
74 # typically uses the port number set in the Waterfall 'status' entry, but
75 # with an externally-visible host name which the buildbot cannot figure out
78 c['buildbotURL'] = inip1.get("buildbot_url")
82 # The 'workers' list defines the set of recognized buildworkers. Each element is
83 # a Worker object, specifying a unique worker name and password. The same
84 # worker name and password must be configured on the worker.
# Register every phase-1 worker declared in the ini file. A "worker " section
# must provide name/password; optional keys configure cleanup behaviour,
# download/upload network locks and shared-workdir mode, all of which are
# handed to the worker as build properties.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'shared_wd':True }
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                # Lazily create one shared MasterLock per distinct lock name.
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "ul_lock"):
                # BUG FIX: this previously read the "dl_lock" option, so a
                # worker's upload lock silently reused its download lock name.
                lockname = ini.get(section, "ul_lock")
                sl_props['ul_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
            # max_builds=1: each worker runs only one build at a time.
            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
# PB port can be either a numeric port or a connection string
# (Twisted endpoint syntax); default to 9989 when unset or empty.
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}
118 c['collapseRequests'] = True
120 # Reduce amount of backlog data
121 c['configurators'] = [util.JanitorConfigurator(
122 logHorizon=timedelta(days=3),
126 @defer.inlineCallbacks
127 def getNewestCompleteTime(bldr):
128 """Returns the complete_at of the latest completed and not SKIPPED
129 build request for this builder, or None if there are no such build
130 requests. We need to filter out SKIPPED requests because we're
131 using collapseRequests=True which is unfortunately marking all
132 previous requests as complete when new buildset is created.
134 @returns: datetime instance or None, via Deferred
137 bldrid = yield bldr.getBuilderId()
138 completed = yield bldr.master.data.get(
139 ('builders', bldrid, 'buildrequests'),
141 resultspec.Filter('complete', 'eq', [True]),
142 resultspec.Filter('results', 'ne', [results.SKIPPED]),
144 order=['-complete_at'], limit=1)
148 complete_at = completed[0]['complete_at']
150 last_build = yield bldr.master.data.get(
153 resultspec.Filter('builderid', 'eq', [bldrid]),
155 order=['-started_at'], limit=1)
157 if last_build and last_build[0]:
158 last_complete_at = last_build[0]['complete_at']
159 if last_complete_at and (last_complete_at > complete_at):
160 return last_complete_at
164 @defer.inlineCallbacks
165 def prioritizeBuilders(master, builders):
166 """Returns sorted list of builders by their last timestamp of completed and
169 @returns: list of sorted builders
172 def is_building(bldr):
173 return bool(bldr.building) or bool(bldr.old_building)
176 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
177 d.addCallback(lambda complete_at: (complete_at, bldr))
181 (complete_at, bldr) = item
185 complete_at = date.replace(tzinfo=tzutc())
187 if is_building(bldr):
189 complete_at = date.replace(tzinfo=tzutc())
191 return (complete_at, bldr.name)
193 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
194 results.sort(key=bldr_sort)
197 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
199 return [r[1] for r in results]
201 c['prioritizeBuilders'] = prioritizeBuilders
203 ####### CHANGESOURCES
205 work_dir = os.path.abspath(ini['general'].get("workdir", "."))
206 scripts_dir = os.path.abspath("../scripts")
208 tree_expire = inip1.getint("expire", 0)
209 config_seed = inip1.get("config_seed", "")
211 repo_url = ini['repo'].get("url")
212 repo_branch = ini['repo'].get("branch", "master")
# Rsync destination for uploading built binaries/images.
rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# --contimeout is only valid when talking to an rsync daemon
# ("host::module" or "rsync://" URLs), not over a remote shell.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

# Rsync destination for uploading source archives (dl/ mirror).
rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]
# Default usign key comment derived from the branch name, e.g.
# "untrusted comment: Openwrt 21.02 key"; overridable via [usign].
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

# Optional package-signing configuration.
if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

# Whether to build and publish the per-kernel-version kmod archive.
enable_kmod_archive = inip1.getboolean("kmod_archive", False)
241 if not os.path.isdir(work_dir+'/source.git'):
242 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
244 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
246 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
247 findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
248 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
251 line = findtargets.stdout.readline()
254 ta = line.decode().strip().split(' ')
255 targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the configured source Git repository.
261 c['change_source'] = []
262 c['change_source'].append(GitPoller(
264 workdir=work_dir+'/work.git', branch=repo_branch,
269 # Configure the Schedulers, which decide how to react to incoming changes. In this
270 # case, just kick off a 'basebuild' build
272 class TagChoiceParameter(BaseParameter):
273 spec_attributes = ["strict", "choices"]
277 def __init__(self, name, label=None, **kw):
278 super().__init__(name, label, **kw)
279 self._choice_list = []
284 basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
287 findtags = subprocess.Popen(
288 ['git', 'ls-remote', '--tags', repo_url],
289 stdout = subprocess.PIPE)
292 line = findtags.stdout.readline()
297 tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
299 if tagver and tagver[1].find(basever[1]) == 0:
300 taglist.append(tagver[1])
302 taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
303 taglist.insert(0, '')
305 self._choice_list = taglist
307 return self._choice_list
309 def parse_from_arg(self, s):
310 if self.strict and s not in self._choice_list:
311 raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
315 c['schedulers'].append(SingleBranchScheduler(
317 change_filter = filter.ChangeFilter(branch=repo_branch),
318 treeStableTimer = 60,
319 builderNames = targets))
321 c['schedulers'].append(ForceScheduler(
323 buttonName = "Force builds",
324 label = "Force build details",
325 builderNames = [ "00_force_build" ],
328 util.CodebaseParameter(
330 label = "Repository",
331 branch = util.FixedParameter(name = "branch", default = ""),
332 revision = util.FixedParameter(name = "revision", default = ""),
333 repository = util.FixedParameter(name = "repository", default = ""),
334 project = util.FixedParameter(name = "project", default = "")
338 reason = util.StringParameter(
341 default = "Trigger build",
347 util.NestedParameter(
349 label="Build Options",
352 util.ChoiceStringParameter(
354 label = "Build target",
356 choices = [ "all" ] + targets
370 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
371 # what steps, and which workers can execute them. Note that any particular build will
372 # only take place on one worker.
def IsSharedWorkdir(step):
    """doStepIf helper: whether this worker builds in a shared work directory.

    Reads the per-worker 'shared_wd' build property and coerces it to bool.
    """
    shared = step.getProperty("shared_wd")
    return bool(shared)
377 def IsCleanupRequested(step):
378 if IsSharedWorkdir(step):
380 do_cleanup = step.getProperty("do_cleanup")
386 def IsExpireRequested(step):
387 if IsSharedWorkdir(step):
390 return not IsCleanupRequested(step)
392 def IsTaggingRequested(step):
393 val = step.getProperty("tag")
394 if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoMasterBuild(step):
    """doStepIf helper: true on every configured branch except 'master'."""
    return "master" != repo_branch
402 def GetBaseVersion():
403 if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
404 return repo_branch.split('-')[1]
409 def GetVersionPrefix(props):
410 basever = GetBaseVersion()
411 if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
412 return "%s/" % props["tag"]
413 elif basever != "master":
414 return "%s-SNAPSHOT/" % basever
418 def GetNextBuild(builder, requests):
420 if r.properties and r.properties.hasProperty("tag"):
424 log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
427 def MakeEnv(overrides=None, tryccache=False):
429 'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
430 'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),
433 env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
434 env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
435 env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")
437 env['CC'] = env['CCC']
438 env['CXX'] = env['CCXX']
440 if overrides is not None:
441 env.update(overrides)
445 def NetLockDl(props):
447 if props.hasProperty("dl_lock"):
448 lock = NetLocks[props["dl_lock"]]
450 return [lock.access('exclusive')]
455 def NetLockUl(props):
457 if props.hasProperty("ul_lock"):
458 lock = NetLocks[props["ul_lock"]]
460 return [lock.access('exclusive')]
465 def TagPropertyValue(props):
466 if props.hasProperty("options"):
467 options = props.getProperty("options")
468 if type(options) is dict:
469 return options.get("tag")
472 def IsTargetSelected(target):
473 def CheckTargetProperty(step):
475 options = step.getProperty("options")
476 if type(options) is dict:
477 selected_target = options.get("target", "all")
478 if selected_target != "all" and selected_target != target:
485 return CheckTargetProperty
487 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
489 seckey = base64.b64decode(seckey)
493 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
494 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
499 dlLock = locks.WorkerLock("worker_dl")
503 for worker in c['workers']:
504 workerNames.append(worker.workername)
506 force_factory = BuildFactory()
508 c['builders'].append(BuilderConfig(
509 name = "00_force_build",
510 workernames = workerNames,
511 factory = force_factory))
513 for target in targets:
514 ts = target.split('/')
516 factory = BuildFactory()
518 # setup shared work directory if required
519 factory.addStep(ShellCommand(
521 description = "Setting up shared work directory",
522 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
524 haltOnFailure = True,
525 doStepIf = IsSharedWorkdir))
527 # find number of cores
528 factory.addStep(SetPropertyFromCommand(
531 description = "Finding number of CPUs",
532 command = ["nproc"]))
535 factory.addStep(SetProperty(
538 description = "Set max concurrency",
539 value = Interpolate("%(prop:nproc:-1)s")))
541 # find gcc and g++ compilers
542 factory.addStep(FileDownload(
543 name = "dlfindbinpl",
544 mastersrc = scripts_dir + '/findbin.pl',
545 workerdest = "../findbin.pl",
548 factory.addStep(SetPropertyFromCommand(
550 property = "cc_command",
551 description = "Finding gcc command",
553 "../findbin.pl", "gcc", "", "",
555 haltOnFailure = True))
557 factory.addStep(SetPropertyFromCommand(
559 property = "cxx_command",
560 description = "Finding g++ command",
562 "../findbin.pl", "g++", "", "",
564 haltOnFailure = True))
566 # see if ccache is available
567 factory.addStep(SetPropertyFromCommand(
568 property = "ccache_command",
569 command = ["which", "ccache"],
570 description = "Testing for ccache command",
571 haltOnFailure = False,
572 flunkOnFailure = False,
573 warnOnFailure = False,
576 # expire tree if needed
578 factory.addStep(FileDownload(
580 doStepIf = IsExpireRequested,
581 mastersrc = scripts_dir + '/expire.sh',
582 workerdest = "../expire.sh",
585 factory.addStep(ShellCommand(
587 description = "Checking for build tree expiry",
588 command = ["./expire.sh", str(tree_expire)],
590 haltOnFailure = True,
591 doStepIf = IsExpireRequested,
594 # cleanup.sh if needed
595 factory.addStep(FileDownload(
596 name = "dlcleanupsh",
597 mastersrc = scripts_dir + '/cleanup.sh',
598 workerdest = "../cleanup.sh",
600 doStepIf = IsCleanupRequested))
602 factory.addStep(ShellCommand(
604 description = "Cleaning previous builds",
605 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
607 haltOnFailure = True,
608 doStepIf = IsCleanupRequested,
611 factory.addStep(ShellCommand(
613 description = "Cleaning work area",
614 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
616 haltOnFailure = True,
617 doStepIf = IsCleanupRequested,
620 # Workaround bug when switching from a checked out tag back to a branch
621 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
622 factory.addStep(ShellCommand(
623 name = "gitcheckout",
624 description = "Ensure that Git HEAD is sane",
625 command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
626 haltOnFailure = True))
628 # check out the source
630 # if repo doesn't exist: 'git clone repourl'
# method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'. Only works with mode='full'
632 # 'git fetch -t repourl branch; git reset --hard revision'
636 branch = repo_branch,
638 method = Interpolate("%(prop:do_cleanup:#?|fresh|clean)s"),
640 haltOnFailure = True,
644 factory.addStep(ShellCommand(
646 description = "Fetching Git remote refs",
647 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
652 factory.addStep(ShellCommand(
654 description = "Checking out Git tag",
655 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
656 haltOnFailure = True,
657 doStepIf = IsTaggingRequested
660 # Verify that Git HEAD points to a tag or branch
661 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
662 factory.addStep(ShellCommand(
664 description = "Ensure that Git HEAD is pointing to a branch or tag",
665 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
666 haltOnFailure = True))
668 factory.addStep(ShellCommand(
670 description = "Remove tmp folder",
671 command=["rm", "-rf", "tmp/"]))
674 factory.addStep(ShellCommand(
675 name = "rmfeedlinks",
676 description = "Remove feed symlinks",
677 command=["rm", "-rf", "package/feeds/"]))
679 factory.addStep(StringDownload(
681 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
682 workerdest = "../ccache_cc.sh",
686 factory.addStep(StringDownload(
688 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
689 workerdest = "../ccache_cxx.sh",
694 factory.addStep(ShellCommand(
695 name = "updatefeeds",
696 description = "Updating feeds",
697 command=["./scripts/feeds", "update"],
698 env = MakeEnv(tryccache=True),
699 haltOnFailure = True,
704 factory.addStep(ShellCommand(
705 name = "installfeeds",
706 description = "Installing feeds",
707 command=["./scripts/feeds", "install", "-a"],
708 env = MakeEnv(tryccache=True),
713 if config_seed is not None:
714 factory.addStep(StringDownload(
715 name = "dlconfigseed",
716 s = config_seed + '\n',
717 workerdest = ".config",
722 factory.addStep(ShellCommand(
724 description = "Seeding .config",
725 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
728 factory.addStep(ShellCommand(
730 description = "Removing output directory",
731 command = ["rm", "-rf", "bin/"]
734 factory.addStep(ShellCommand(
736 description = "Populating .config",
737 command = ["make", "defconfig"],
742 factory.addStep(ShellCommand(
744 description = "Checking architecture",
745 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
753 factory.addStep(SetPropertyFromCommand(
756 description = "Finding libc suffix",
757 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
760 if usign_key is not None:
761 factory.addStep(StringDownload(
762 name = "dlkeybuildpub",
763 s = UsignSec2Pub(usign_key, usign_comment),
764 workerdest = "key-build.pub",
768 factory.addStep(StringDownload(
770 s = "# fake private key",
771 workerdest = "key-build",
775 factory.addStep(StringDownload(
776 name = "dlkeybuilducert",
777 s = "# fake certificate",
778 workerdest = "key-build.ucert",
783 factory.addStep(ShellCommand(
785 description = "Preparing dl/",
786 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
792 factory.addStep(ShellCommand(
794 description = "Building and installing GNU tar",
795 command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/tar/compile", "V=s"],
796 env = MakeEnv(tryccache=True),
801 factory.addStep(ShellCommand(
803 description = "Populating dl/",
804 command = ["make", Interpolate("-j%(prop:njobs)s"), "download", "V=s"],
807 locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),
810 factory.addStep(ShellCommand(
812 description = "Cleaning base-files",
813 command=["make", "package/base-files/clean", "V=s"]
817 factory.addStep(ShellCommand(
819 description = "Building and installing tools",
820 command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/install", "V=s"],
821 env = MakeEnv(tryccache=True),
825 factory.addStep(ShellCommand(
827 description = "Building and installing toolchain",
828 command=["make", Interpolate("-j%(prop:njobs)s"), "toolchain/install", "V=s"],
833 factory.addStep(ShellCommand(
835 description = "Building kmods",
836 command=["make", Interpolate("-j%(prop:njobs)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
841 # find kernel version
842 factory.addStep(SetPropertyFromCommand(
843 name = "kernelversion",
844 property = "kernelversion",
845 description = "Finding the effective Kernel version",
846 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
847 env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }
850 factory.addStep(ShellCommand(
852 description = "Cleaning up package build",
853 command=["make", "package/cleanup", "V=s"]
856 factory.addStep(ShellCommand(
858 description = "Building packages",
859 command=["make", Interpolate("-j%(prop:njobs)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
864 factory.addStep(ShellCommand(
866 description = "Installing packages",
867 command=["make", Interpolate("-j%(prop:njobs)s"), "package/install", "V=s"],
872 factory.addStep(ShellCommand(
874 description = "Indexing packages",
875 command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
880 factory.addStep(ShellCommand(
882 description = "Building and installing images",
883 command=["make", Interpolate("-j%(prop:njobs)s"), "target/install", "V=s"],
888 factory.addStep(ShellCommand(
890 description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
891 command = "make -j1 buildinfo V=s || true",
896 factory.addStep(ShellCommand(
897 name = "json_overview_image_info",
898 description = "Generate profiles.json in target folder",
899 command = "make -j1 json_overview_image_info V=s || true",
904 factory.addStep(ShellCommand(
906 description = "Calculating checksums",
907 command=["make", "-j1", "checksum", "V=s"],
912 if enable_kmod_archive:
913 factory.addStep(ShellCommand(
915 description = "Creating kmod directory",
916 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
920 factory.addStep(ShellCommand(
921 name = "kmodprepare",
922 description = "Preparing kmod archive",
923 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
924 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
925 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
929 factory.addStep(ShellCommand(
931 description = "Indexing kmod archive",
932 command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
933 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
939 if ini.has_option("gpg", "key") or usign_key is not None:
940 factory.addStep(MasterShellCommand(
941 name = "signprepare",
942 description = "Preparing temporary signing directory",
943 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
947 factory.addStep(ShellCommand(
949 description = "Packing files to sign",
950 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
954 factory.addStep(FileUpload(
955 workersrc = "sign.tar.gz",
956 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
960 factory.addStep(MasterShellCommand(
962 description = "Signing files",
963 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
964 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
968 factory.addStep(FileDownload(
969 name = "dlsigntargz",
970 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
971 workerdest = "sign.tar.gz",
975 factory.addStep(ShellCommand(
977 description = "Unpacking signed files",
978 command = ["tar", "-xzf", "sign.tar.gz"],
983 factory.addStep(ShellCommand(
985 description = "Preparing upload directory structure",
986 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
990 factory.addStep(ShellCommand(
991 name = "linkprepare",
992 description = "Preparing repository symlink",
993 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
994 doStepIf = IsNoMasterBuild,
998 if enable_kmod_archive:
999 factory.addStep(ShellCommand(
1000 name = "kmoddirprepare",
1001 description = "Preparing kmod archive upload directory",
1002 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1003 haltOnFailure = True
1006 factory.addStep(ShellCommand(
1008 description = "Uploading directory structure",
1009 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1010 env={'RSYNC_PASSWORD': rsync_bin_key},
1011 haltOnFailure = True,
1016 # download remote sha256sums to 'target-sha256sums'
1017 factory.addStep(ShellCommand(
1018 name = "target-sha256sums",
1019 description = "Fetching remote sha256sums for target",
1020 command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
1021 env={'RSYNC_PASSWORD': rsync_bin_key},
1023 haltOnFailure = False,
1024 flunkOnFailure = False,
1025 warnOnFailure = False,
1028 # build list of files to upload
1029 factory.addStep(FileDownload(
1030 name = "dlsha2rsyncpl",
1031 mastersrc = scripts_dir + '/sha2rsync.pl',
1032 workerdest = "../sha2rsync.pl",
1036 factory.addStep(ShellCommand(
1038 description = "Building list of files to upload",
1039 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1040 haltOnFailure = True,
1043 factory.addStep(FileDownload(
1044 name = "dlrsync.sh",
1045 mastersrc = scripts_dir + '/rsync.sh',
1046 workerdest = "../rsync.sh",
1050 # upload new files and update existing ones
1051 factory.addStep(ShellCommand(
1052 name = "targetupload",
1053 description = "Uploading target files",
1054 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1055 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1056 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1057 env={'RSYNC_PASSWORD': rsync_bin_key},
1058 haltOnFailure = True,
1062 # delete files which don't exist locally
1063 factory.addStep(ShellCommand(
1064 name = "targetprune",
1065 description = "Pruning target files",
1066 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1067 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1068 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1069 env={'RSYNC_PASSWORD': rsync_bin_key},
1070 haltOnFailure = True,
1075 if enable_kmod_archive:
1076 factory.addStep(ShellCommand(
1077 name = "kmodupload",
1078 description = "Uploading kmod archive",
1079 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
1080 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1081 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1082 env={'RSYNC_PASSWORD': rsync_bin_key},
1083 haltOnFailure = True,
1088 if rsync_src_url is not None:
1089 factory.addStep(ShellCommand(
1090 name = "sourcelist",
1091 description = "Finding source archives to upload",
1092 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
1093 haltOnFailure = True
1096 factory.addStep(ShellCommand(
1097 name = "sourceupload",
1098 description = "Uploading source archives",
1099 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
1100 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1101 env={'RSYNC_PASSWORD': rsync_src_key},
1102 haltOnFailure = True,
1107 factory.addStep(ShellCommand(
1109 description = "Reporting disk usage",
1110 command=["df", "-h", "."],
1111 env={'LC_ALL': 'C'},
1112 haltOnFailure = False,
1113 flunkOnFailure = False,
1114 warnOnFailure = False,
1118 factory.addStep(ShellCommand(
1120 description = "Reporting estimated file space usage",
1121 command=["du", "-sh", "."],
1122 env={'LC_ALL': 'C'},
1123 haltOnFailure = False,
1124 flunkOnFailure = False,
1125 warnOnFailure = False,
1129 factory.addStep(ShellCommand(
1130 name = "ccachestat",
1131 description = "Reporting ccache stats",
1132 command=["ccache", "-s"],
1133 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1134 want_stderr = False,
1135 haltOnFailure = False,
1136 flunkOnFailure = False,
1137 warnOnFailure = False,
1141 c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))
1143 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
1144 force_factory.addStep(steps.Trigger(
1145 name = "trigger_%s" % target,
1146 description = "Triggering %s build" % target,
1147 schedulerNames = [ "trigger_%s" % target ],
1148 set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
1149 doStepIf = IsTargetSelected(target)
1153 ####### STATUS TARGETS
1155 # 'status' is a list of Status Targets. The results of each build will be
1156 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1157 # including web pages, email senders, and IRC bots.
1159 if "status_bind" in inip1:
1161 'port': inip1.get("status_bind"),
1163 'waterfall_view': True,
1164 'console_view': True,
1169 if "status_user" in inip1 and "status_password" in inip1:
1170 c['www']['auth'] = util.UserPasswordAuth([
1171 (inip1.get("status_user"), inip1.get("status_password"))
1173 c['www']['authz'] = util.Authz(
1174 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
1175 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
1179 if ini.has_section("irc"):
1181 irc_host = iniirc.get("host", None)
1182 irc_port = iniirc.getint("port", 6667)
1183 irc_chan = iniirc.get("channel", None)
1184 irc_nick = iniirc.get("nickname", None)
1185 irc_pass = iniirc.get("password", None)
1187 if irc_host and irc_nick and irc_chan:
1188 irc = reporters.IRC(irc_host, irc_nick,
1190 password = irc_pass,
1191 channels = [ irc_chan ],
1192 notify_events = [ 'exception', 'problem', 'recovery' ]
1195 c['services'].append(irc)
1197 c['revlink'] = util.RevlinkMatch([
1198 r'https://git.openwrt.org/openwrt/(.*).git'
1200 r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')
1205 # This specifies what database buildbot uses to store its state. You can leave
1206 # this at its default for all but the largest installations.
1207 'db_url' : "sqlite:///state.sqlite",
1210 c['buildbotNetUsageData'] = None