2 # ex: set syntax=python:
10 from dateutil.tz import tzutc
11 from datetime import datetime, timedelta
13 from twisted.internet import defer
14 from twisted.python import log
16 from buildbot import locks
17 from buildbot.data import resultspec
18 from buildbot.changes import filter
19 from buildbot.changes.gitpoller import GitPoller
20 from buildbot.config import BuilderConfig
21 from buildbot.plugins import reporters
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import properties
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Interpolate
29 from buildbot.process.properties import Property
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import BaseParameter
32 from buildbot.schedulers.forcesched import ForceScheduler
33 from buildbot.schedulers.forcesched import ValidationError
34 from buildbot.steps.master import MasterShellCommand, SetProperty
35 from buildbot.steps.shell import SetPropertyFromCommand
36 from buildbot.steps.shell import ShellCommand
37 from buildbot.steps.source.git import Git
38 from buildbot.steps.transfer import FileDownload
39 from buildbot.steps.transfer import FileUpload
40 from buildbot.steps.transfer import StringDownload
41 from buildbot.worker import Worker
# Record this master's PID once, but never clobber an existing pidfile.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile_handle:
        pidfile_handle.write(str(os.getpid()))
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Load the INI-style deployment configuration; the path can be overridden
# through the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# All three sections are mandatory for a phase1 master; bail out early.
if "general" not in ini or "phase1" not in ini or "rsync" not in ini:
    raise ValueError("Fix your configuration")
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini['general'].get("title")
c['titleURL'] = ini['general'].get("title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# NOTE(review): 'inip1' is presumably ini['phase1'], assigned on a line not
# visible in this chunk — confirm before relying on it.
c['buildbotURL'] = inip1.get("buildbot_url")
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

for section in ini.sections():
    if section.startswith("worker "):
        # Only accept fully-specified phase-1 workers.
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
            # Per-worker build properties with conservative defaults.
            sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
            name = ini.get(section, "name")
            password = ini.get(section, "password")

            if ini.has_option(section, "builds"):
                max_builds = ini.getint(section, "builds")
                sl_props['max_builds'] = max_builds
                # NOTE(review): upstream guards the next assignment with
                # "if max_builds == 1:" on a line not visible in this chunk.
                sl_props['shared_wd'] = True
            if ini.has_option(section, "cleanup"):
                sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
            if ini.has_option(section, "dl_lock"):
                # Named master-side lock serializing downloads per lock name.
                lockname = ini.get(section, "dl_lock")
                sl_props['dl_lock'] = lockname
                if lockname not in NetLocks:
                    NetLocks[lockname] = locks.MasterLock(lockname)
109 if ini.has_option(section, "ul_lock"):
110 lockname = ini.get(section, "dl_lock")
111 sl_props['ul_lock'] = lockname
112 if lockname not in NetLocks:
113 NetLocks[lockname] = locks.MasterLock(lockname)
            if ini.has_option(section, "shared_wd"):
                shared_wd = ini.getboolean(section, "shared_wd")
                sl_props['shared_wd'] = shared_wd
                # A shared workdir cannot host concurrent builds.
                if shared_wd and (max_builds != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')
            # NOTE(review): c['workers'] is initialized on a line not visible
            # in this chunk.
            c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))

# PB port can be either a numeric port or a connection string
pb_port = inip1.get("port") or 9989
c['protocols'] = {'pb': {'port': pb_port}}
# Merge queued build requests for the same builder into a single build.
c['collapseRequests'] = True

# Reduce amount of backlog data
# NOTE(review): the JanitorConfigurator call is truncated here; its closing
# arguments are on lines not visible in this chunk.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """
    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    # NOTE(review): the list brackets wrapping the Filter arguments and the
    # "no completed requests" guard are on lines not visible in this chunk.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        resultspec.Filter('complete', 'eq', [True]),
        resultspec.Filter('results', 'ne', [results.SKIPPED]),
        order=['-complete_at'], limit=1)
    complete_at = completed[0]['complete_at']

    # Most recently started build for the same builder; its completion time
    # may be newer than the request's.
    # NOTE(review): the ('builds',) path argument is on a line not visible
    # in this chunk.
    last_build = yield bldr.master.data.get(
        resultspec.Filter('builderid', 'eq', [bldrid]),
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Busy builders carry active (or stale) build objects.
        return bool(bldr.building) or bool(bldr.old_building)

    # NOTE(review): the enclosing helper definitions (bldr_info / bldr_sort)
    # are on lines not visible in this chunk; the statements below belong to
    # their bodies.
    d = defer.maybeDeferred(getNewestCompleteTime, bldr)
    d.addCallback(lambda complete_at: (complete_at, bldr))

    (complete_at, bldr) = item
    # Normalize to a timezone-aware datetime for comparison.
    complete_at = date.replace(tzinfo=tzutc())

    if is_building(bldr):
        # Deprioritize builders that are currently building.
        complete_at = date.replace(tzinfo=tzutc())

    return (complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
####### CHANGESOURCES

# Master-side working directory and helper-script location.
work_dir = os.path.abspath(ini['general'].get("workdir", "."))
scripts_dir = os.path.abspath("../scripts")

tree_expire = inip1.getint("expire", 0)
config_seed = inip1.get("config_seed", "")

repo_url = ini['repo'].get("url")
repo_branch = ini['repo'].get("branch", "master")

# rsync endpoints for built binaries and for source tarballs.
rsync_bin_url = ini['rsync'].get("binary_url")
rsync_bin_key = ini['rsync'].get("binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# Only rsync-daemon style URLs support --contimeout.
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
    rsync_bin_defopts += ["--contimeout=20"]

rsync_src_url = ini['rsync'].get("source_url")
rsync_src_key = ini['rsync'].get("source_password")
rsync_src_defopts = ["-v", "-4", "--timeout=120"]

if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
    rsync_src_defopts += ["--contimeout=20"]

# Default usign comment derived from the branch name, e.g. "Openwrt 21.02 key".
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_section("usign"):
    usign_key = ini['usign'].get("key")
    usign_comment = ini['usign'].get("comment", usign_comment)

enable_kmod_archive = inip1.getboolean("kmod_archive", False)

# Maintain a master-side clone of the source tree for target discovery.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
# NOTE(review): upstream guards the pull below with an "else:" on a line not
# visible in this chunk.
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
# Enumerate "<target>/<subtarget>" pairs from the source tree.
findtargets = subprocess.Popen(['./scripts/dump-target-info.pl', 'targets'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# NOTE(review): the enclosing read loop and the initialization of 'targets'
# are on lines not visible in this chunk.
line = findtargets.stdout.readline()
ta = line.decode().strip().split(' ')
targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we point to the buildbot clone of pyflakes.

c['change_source'] = []
# NOTE(review): the GitPoller call is truncated; repourl/pollinterval lines
# are not visible in this chunk.
c['change_source'].append(GitPoller(
    workdir=work_dir+'/work.git', branch=repo_branch,
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

class TagChoiceParameter(BaseParameter):
    """Force-scheduler parameter offering the release tags of this branch."""
    spec_attributes = ["strict", "choices"]

    def __init__(self, name, label=None, **kw):
        super().__init__(name, label, **kw)
        # Cached tag list, refreshed when choices are enumerated.
        self._choice_list = []

    # NOTE(review): the method/property definition enclosing the statements
    # below (the choices enumeration) is on lines not visible in this chunk.
    # Branch names look like "<name>-NN.NN"; capture the version part.
    basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)

    # Query remote tags without a local checkout.
    findtags = subprocess.Popen(
        ['git', 'ls-remote', '--tags', repo_url],
        stdout = subprocess.PIPE)

    line = findtags.stdout.readline()

    tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())

    # Keep only tags belonging to this branch's base version.
    if tagver and tagver[1].find(basever[1]) == 0:
        taglist.append(tagver[1])

    # Sort newest first; suffix non-rc tags so releases sort above their rcs.
    taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
    taglist.insert(0, '')

    self._choice_list = taglist

    return self._choice_list

    def parse_from_arg(self, s):
        # NOTE(review): the "return s" success path is on a line not visible
        # in this chunk.
        if self.strict and s not in self._choice_list:
            raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
# Trigger all target builders whenever the tracked branch changes.
# NOTE(review): the scheduler 'name' argument and the initialization of
# c['schedulers'] are on lines not visible in this chunk.
c['schedulers'].append(SingleBranchScheduler(
    change_filter = filter.ChangeFilter(branch=repo_branch),
    treeStableTimer = 60,
    builderNames = targets))
# Manual trigger: one pseudo-builder fans out to the selected target(s).
# NOTE(review): this ForceScheduler call is heavily truncated; the
# codebases/properties list wrappers are on lines not visible in this chunk.
c['schedulers'].append(ForceScheduler(
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],
        util.CodebaseParameter(
            label = "Repository",
            # Fixed empty codebase fields: the repository is preconfigured.
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
    reason = util.StringParameter(
        default = "Trigger build",
        util.NestedParameter(
            label="Build Options",
                util.ChoiceStringParameter(
                    label = "Build target",
                    choices = [ "all" ] + targets
378 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
379 # what steps, and which workers can execute them. Note that any particular build will
380 # only take place on one worker.
382 def IsSharedWorkdir(step):
383 return bool(step.getProperty("shared_wd"))
def IsCleanupRequested(step):
    """Step predicate: should previous build output be cleaned up?"""
    if IsSharedWorkdir(step):
        # NOTE(review): the early return for shared workdirs is on a line
        # not visible in this chunk.
    do_cleanup = step.getProperty("do_cleanup")
def IsExpireRequested(step):
    """Step predicate: expire the tree (mutually exclusive with cleanup)."""
    if IsSharedWorkdir(step):
        # NOTE(review): the shared-workdir return is on a line not visible
        # in this chunk.
    return not IsCleanupRequested(step)
def IsTaggingRequested(step):
    """Step predicate: was a valid release tag requested via the force form?"""
    val = step.getProperty("tag")
    # Accept only strict "x.y.z" / "x.y.z-rcN" tags.
    if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
        # NOTE(review): the True/False returns are on lines not visible in
        # this chunk.
def IsNoMasterBuild(step):
    """Step predicate: true unless the configured branch is "master"."""
    on_master = (repo_branch == "master")
    return not on_master
def GetBaseVersion():
    """Return "NN.NN" from a release branch name such as "openwrt-21.02".

    NOTE(review): the fallback return for non-release branches is on a line
    not visible in this chunk.
    """
    if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
        return repo_branch.split('-')[1]
def GetVersionPrefix(props):
    """Upload-path prefix: "<tag>/" for tagged builds, "<basever>-SNAPSHOT/"
    on release branches.

    NOTE(review): the @properties.renderer decorator and the final fallback
    return are on lines not visible in this chunk.
    """
    basever = GetBaseVersion()
    if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
        return "%s/" % props["tag"]
    elif basever != "master":
        return "%s-SNAPSHOT/" % basever
def GetNumJobs(props):
    """Parallel make jobs: nproc divided by concurrent builds, minimum 1.

    NOTE(review): the decorator and return statement are on lines not
    visible in this chunk.
    """
    nproc = int(props.getProperty("nproc", "1"))
    njobs = int(nproc / props.getProperty("max_builds", 1)) or 1
def GetNextBuild(builder, requests):
    """Pick the next build request, preferring tagged (release) requests.

    NOTE(review): most of this function's body (iteration over requests and
    fallback selection) is on lines not visible in this chunk.
    """
    if r.properties and r.properties.hasProperty("tag"):

    log.msg("GetNextBuild: {:>20} id: {} bsid: {}".format(builder.name, r.id, r.bsid))
def MakeEnv(overrides=None, tryccache=False):
    """Build the environment dict for compile steps.

    NOTE(review): the opening of the env dict literal and the conditional
    selecting ccache wrappers vs. plain compilers are on lines not visible
    in this chunk.
    """
    'CCC': Interpolate("%(prop:cc_command:-gcc)s"),
    'CCXX': Interpolate("%(prop:cxx_command:-g++)s"),

    # ccache path: route CC/CXX through the generated wrapper scripts.
    env['CC'] = Interpolate("%(prop:builddir)s/ccache_cc.sh")
    env['CXX'] = Interpolate("%(prop:builddir)s/ccache_cxx.sh")
    env['CCACHE'] = Interpolate("%(prop:ccache_command:-)s")

    # plain path: use the discovered compiler commands directly.
    env['CC'] = env['CCC']
    env['CXX'] = env['CCXX']

    if overrides is not None:
        env.update(overrides)
def NetLockDl(props):
    """Render the download-lock list for a build (empty when unconfigured).

    NOTE(review): the decorator, the 'lock = None' initialization and the
    'if lock is not None:' guard are on lines not visible in this chunk.
    """
    if props.hasProperty("dl_lock"):
        lock = NetLocks[props["dl_lock"]]
    return [lock.access('exclusive')]
def NetLockUl(props):
    """Render the upload-lock list for a build (empty when unconfigured).

    NOTE(review): the decorator, initialization and guard lines are not
    visible in this chunk (mirrors NetLockDl).
    """
    if props.hasProperty("ul_lock"):
        lock = NetLocks[props["ul_lock"]]
    return [lock.access('exclusive')]
def TagPropertyValue(props):
    """Extract the 'tag' entry from the force-build options dict.

    NOTE(review): the fallthrough 'return None' is on a line not visible in
    this chunk.
    """
    if props.hasProperty("options"):
        options = props.getProperty("options")
        if type(options) is dict:
            return options.get("tag")
def IsTargetSelected(target):
    """Return a doStepIf predicate matching the force-build target choice."""
    def CheckTargetProperty(step):
        # NOTE(review): the surrounding try/except and the return statements
        # of this inner function are on lines not visible in this chunk.
        options = step.getProperty("options")
        if type(options) is dict:
            selected_target = options.get("target", "all")
            if selected_target != "all" and selected_target != target:

    return CheckTargetProperty
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key block from a base64-encoded secret key.

    The usign secret key layout is: 2-byte algorithm tag, KDF material,
    an 8-byte key fingerprint at offset 32, and the key payload from
    offset 72. The public key is tag + fingerprint + payload, re-encoded.

    Returns the two-line public key text (comment line + base64 line),
    or None if the input is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # BUG FIX: base64.b64encode() returns bytes; formatting bytes into the
    # "{}" placeholder produced a literal "b'...'" string, corrupting the
    # emitted key file. Decode to ASCII text before formatting.
    return "{}\n{}".format(
        re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
# Worker-side lock: serialize download steps per worker.
dlLock = locks.WorkerLock("worker_dl")

# NOTE(review): the initialization of workerNames and c['builders'] is on
# lines not visible in this chunk.
for worker in c['workers']:
    workerNames.append(worker.workername)

# Pseudo-builder used by the ForceScheduler to fan out trigger steps.
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
# One builder per "<target>/<subtarget>" pair.
for target in targets:
    ts = target.split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # derive the make -j value from nproc and max_builds
    factory.addStep(SetProperty(
        description = "Set max concurrency",
        value = Interpolate("%(kw:jobs)s", jobs=GetNumJobs)))

    # find gcc and g++ compilers
    # NOTE(review): this FileDownload call is truncated; its closing
    # arguments are on lines not visible in this chunk.
    factory.addStep(FileDownload(
        name = "dlfindbinpl",
        mastersrc = scripts_dir + '/findbin.pl',
        workerdest = "../findbin.pl",

    # NOTE(review): the 'command = [' wrapper lines of the next two steps
    # are not visible in this chunk.
    factory.addStep(SetPropertyFromCommand(
        property = "cc_command",
        description = "Finding gcc command",
        "../findbin.pl", "gcc", "", "",
        haltOnFailure = True))

    factory.addStep(SetPropertyFromCommand(
        property = "cxx_command",
        description = "Finding g++ command",
        "../findbin.pl", "g++", "", "",
        haltOnFailure = True))

    # see if ccache is available
    factory.addStep(SetPropertyFromCommand(
        property = "ccache_command",
        command = ["which", "ccache"],
        description = "Testing for ccache command",
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    # expire tree if needed
    # NOTE(review): upstream nests the two expiry steps under an
    # "if tree_expire > 0:" guard on a line not visible in this chunk.
    factory.addStep(FileDownload(
        doStepIf = IsExpireRequested,
        mastersrc = scripts_dir + '/expire.sh',
        workerdest = "../expire.sh",

    factory.addStep(ShellCommand(
        description = "Checking for build tree expiry",
        command = ["./expire.sh", str(tree_expire)],
        haltOnFailure = True,
        doStepIf = IsExpireRequested,

    # cleanup.sh if needed
    factory.addStep(FileDownload(
        name = "dlcleanupsh",
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        doStepIf = IsCleanupRequested))

    factory.addStep(ShellCommand(
        description = "Cleaning previous builds",
        command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
        haltOnFailure = True,
        doStepIf = IsCleanupRequested,

    factory.addStep(ShellCommand(
        description = "Cleaning work area",
        command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
        haltOnFailure = True,
        doStepIf = IsCleanupRequested,

    # Workaround bug when switching from a checked out tag back to a branch
    # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
    factory.addStep(ShellCommand(
        name = "gitcheckout",
        description = "Ensure that Git HEAD is sane",
        command = "if [ -d .git ]; then git checkout -f %s && git branch --set-upstream-to origin/%s || rm -fr .git; else exit 0; fi" %(repo_branch, repo_branch),
        haltOnFailure = True))
    # check out the source
    # if repo doesn't exist: 'git clone repourl'
    # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
    # 'git fetch -t repourl branch; git reset --hard revision'
    # NOTE(review): the Git step's opening call and repourl/mode lines are
    # not visible in this chunk.
        branch = repo_branch,
        method = Interpolate("%(prop:do_cleanup:#?|fresh|clean)s"),
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        description = "Fetching Git remote refs",
        command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],

    # Switch to the requested release tag when one was forced.
    factory.addStep(ShellCommand(
        description = "Checking out Git tag",
        command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
        haltOnFailure = True,
        doStepIf = IsTaggingRequested

    # Verify that Git HEAD points to a tag or branch
    # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
    factory.addStep(ShellCommand(
        description = "Ensure that Git HEAD is pointing to a branch or tag",
        command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        description = "Remove tmp folder",
        command=["rm", "-rf", "tmp/"]))

    factory.addStep(ShellCommand(
        name = "rmfeedlinks",
        description = "Remove feed symlinks",
        command=["rm", "-rf", "package/feeds/"]))

    # Generate ccache wrapper scripts used by MakeEnv(tryccache=True).
    factory.addStep(StringDownload(
        s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
        workerdest = "../ccache_cc.sh",

    factory.addStep(StringDownload(
        s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
        workerdest = "../ccache_cxx.sh",

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        command=["./scripts/feeds", "update"],
        env = MakeEnv(tryccache=True),
        haltOnFailure = True,

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        command=["./scripts/feeds", "install", "-a"],
        env = MakeEnv(tryccache=True),
    # Seed the build configuration from the master-provided fragment.
    if config_seed is not None:
        factory.addStep(StringDownload(
            name = "dlconfigseed",
            s = config_seed + '\n',
            workerdest = ".config",

    # Append the target selection and package-signing switch to .config.
    factory.addStep(ShellCommand(
        description = "Seeding .config",
        command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')

    factory.addStep(ShellCommand(
        description = "Removing output directory",
        command = ["rm", "-rf", "bin/"]

    factory.addStep(ShellCommand(
        description = "Populating .config",
        command = ["make", "defconfig"],

    # Sanity check: defconfig must not have dropped the requested target.
    factory.addStep(ShellCommand(
        description = "Checking architecture",
        command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],

    # Derive the libc suffix used in bin/targets paths ("" for musl).
    factory.addStep(SetPropertyFromCommand(
        description = "Finding libc suffix",
        command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))

    # Install signing key material (real public key, placeholder secrets).
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "key-build.pub",

        factory.addStep(StringDownload(
            s = "# fake private key",
            workerdest = "key-build",

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "key-build.ucert",
    # Share the download cache across builds via $HOME/dl.
    factory.addStep(ShellCommand(
        description = "Preparing dl/",
        command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",

    # GNU tar is needed early for reproducible archives.
    factory.addStep(ShellCommand(
        description = "Building and installing GNU tar",
        command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/tar/compile", "V=s"],
        env = MakeEnv(tryccache=True),

    factory.addStep(ShellCommand(
        description = "Populating dl/",
        command = ["make", Interpolate("-j%(prop:njobs)s"), "download", "V=s"],
        # Serialize downloads per worker and, if configured, per net lock.
        locks = properties.FlattenList(NetLockDl, [dlLock.access('exclusive')]),

    factory.addStep(ShellCommand(
        description = "Cleaning base-files",
        command=["make", "package/base-files/clean", "V=s"]

    factory.addStep(ShellCommand(
        description = "Building and installing tools",
        command = ["make", Interpolate("-j%(prop:njobs)s"), "tools/install", "V=s"],
        env = MakeEnv(tryccache=True),

    factory.addStep(ShellCommand(
        description = "Building and installing toolchain",
        command=["make", Interpolate("-j%(prop:njobs)s"), "toolchain/install", "V=s"],

    factory.addStep(ShellCommand(
        description = "Building kmods",
        command=["make", Interpolate("-j%(prop:njobs)s"), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],

    # find kernel version
    factory.addStep(SetPropertyFromCommand(
        name = "kernelversion",
        property = "kernelversion",
        description = "Finding the effective Kernel version",
        command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
        env = { 'TOPDIR': Interpolate("%(prop:builddir)s/build") }

    factory.addStep(ShellCommand(
        description = "Cleaning up package build",
        command=["make", "package/cleanup", "V=s"]

    factory.addStep(ShellCommand(
        description = "Building packages",
        command=["make", Interpolate("-j%(prop:njobs)s"), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],

    factory.addStep(ShellCommand(
        description = "Installing packages",
        command=["make", Interpolate("-j%(prop:njobs)s"), "package/install", "V=s"],

    # Index without signing; signing happens master-side later.
    factory.addStep(ShellCommand(
        description = "Indexing packages",
        command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],

    factory.addStep(ShellCommand(
        description = "Building and installing images",
        command=["make", Interpolate("-j%(prop:njobs)s"), "target/install", "V=s"],

    # buildinfo targets are best-effort; "|| true" keeps the build green.
    factory.addStep(ShellCommand(
        description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
        command = "make -j1 buildinfo V=s || true",

    factory.addStep(ShellCommand(
        name = "json_overview_image_info",
        description = "Generate profiles.json in target folder",
        command = "make -j1 json_overview_image_info V=s || true",

    factory.addStep(ShellCommand(
        description = "Calculating checksums",
        command=["make", "-j1", "checksum", "V=s"],
    # Optionally collect kmod packages into a per-kernel archive directory.
    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            description = "Creating kmod directory",
            command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],

        factory.addStep(ShellCommand(
            name = "kmodprepare",
            description = "Preparing kmod archive",
            command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],

        factory.addStep(ShellCommand(
            description = "Indexing kmod archive",
            command=["make", Interpolate("-j%(prop:njobs)s"), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
                Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],

    # Sign sha256sums/Packages master-side when a gpg or usign key exists.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],

        factory.addStep(ShellCommand(
            description = "Packing files to sign",
            command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),

        factory.addStep(FileUpload(
            workersrc = "sign.tar.gz",
            masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),

        factory.addStep(MasterShellCommand(
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },

        factory.addStep(FileDownload(
            name = "dlsigntargz",
            mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
            workerdest = "sign.tar.gz",

        factory.addStep(ShellCommand(
            description = "Unpacking signed files",
            command = ["tar", "-xzf", "sign.tar.gz"],
    # Build the local upload directory layout mirrored to the rsync server.
    factory.addStep(ShellCommand(
        description = "Preparing upload directory structure",
        command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        haltOnFailure = True

    # Release branches link their packages dir to the shared packages tree.
    factory.addStep(ShellCommand(
        name = "linkprepare",
        description = "Preparing repository symlink",
        command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
        doStepIf = IsNoMasterBuild,
        haltOnFailure = True

    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmoddirprepare",
            description = "Preparing kmod archive upload directory",
            command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
            haltOnFailure = True

    factory.addStep(ShellCommand(
        description = "Uploading directory structure",
        command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # download remote sha256sums to 'target-sha256sums'
    # Best-effort: the file may not exist yet on a first upload.
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for target",
        command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    # build list of files to upload
    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = scripts_dir + '/sha2rsync.pl',
        workerdest = "../sha2rsync.pl",

    factory.addStep(ShellCommand(
        description = "Building list of files to upload",
        command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
        haltOnFailure = True,

    factory.addStep(FileDownload(
        name = "dlrsync.sh",
        mastersrc = scripts_dir + '/rsync.sh',
        workerdest = "../rsync.sh",

    # upload new files and update existing ones
    factory.addStep(ShellCommand(
        name = "targetupload",
        description = "Uploading target files",
        command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    # delete files which don't exist locally
    factory.addStep(ShellCommand(
        name = "targetprune",
        description = "Pruning target files",
        command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
            ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
            Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,

    if enable_kmod_archive:
        factory.addStep(ShellCommand(
            name = "kmodupload",
            description = "Uploading kmod archive",
            command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
                ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
                Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
            env={'RSYNC_PASSWORD': rsync_bin_key},
            haltOnFailure = True,
    # Optionally mirror freshly-downloaded source archives.
    if rsync_src_url is not None:
        # Only archives newer than .config (i.e. fetched by this build).
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer .config -printf '%f\\n' > sourcelist",
            haltOnFailure = True

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
                [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = True,

    # Diagnostics only: report disk and tree usage, never fail the build.
    factory.addStep(ShellCommand(
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    factory.addStep(ShellCommand(
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,

    # Register the builder and a Triggerable so the force builder can fan out.
    c['builders'].append(BuilderConfig(name=target, workernames=workerNames, factory=factory, nextBuild=GetNextBuild))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % target,
        description = "Triggering %s build" % target,
        schedulerNames = [ "trigger_%s" % target ],
        set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
        doStepIf = IsTargetSelected(target)
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if "status_bind" in inip1:
    # NOTE(review): the opening of the c['www'] dict literal (and its
    # plugins sub-dict) is on lines not visible in this chunk.
    'port': inip1.get("status_bind"),
    'waterfall_view': True,
    'console_view': True,

    # Optional basic auth: a single admin user controls all endpoints.
    if "status_user" in inip1 and "status_password" in inip1:
        c['www']['auth'] = util.UserPasswordAuth([
            (inip1.get("status_user"), inip1.get("status_password"))
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[inip1.get("status_user")]) ]
# Optional IRC notifications.
if ini.has_section("irc"):
    # NOTE(review): the assignment of 'iniirc' (presumably ini['irc']) is on
    # a line not visible in this chunk.
    irc_host = iniirc.get("host", None)
    irc_port = iniirc.getint("port", 6667)
    irc_chan = iniirc.get("channel", None)
    irc_nick = iniirc.get("nickname", None)
    irc_pass = iniirc.get("password", None)

    # host, nickname and channel are the minimum viable configuration.
    if irc_host and irc_nick and irc_chan:
        irc = reporters.IRC(irc_host, irc_nick,
            password = irc_pass,
            channels = [ irc_chan ],
            notify_events = [ 'exception', 'problem', 'recovery' ]

        c['services'].append(irc)
# Map commit hashes to cgit URLs in the web UI.
# NOTE(review): the closing of the pattern list is on a line not visible in
# this chunk.
c['revlink'] = util.RevlinkMatch([
    r'https://git.openwrt.org/openwrt/(.*).git'
    r'https://git.openwrt.org/?p=openwrt/\1.git;a=commit;h=%s')

####### DB URL

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
# NOTE(review): the enclosing "c['db'] = {" and "}" are on lines not visible
# in this chunk.
    'db_url' : "sqlite:///state.sqlite",

# Opt out of buildbot's anonymous usage reporting.
c['buildbotNetUsageData'] = None