# ex: set syntax=python:
# Phase-1 buildmaster configuration (OpenWrt-style buildbot, 0.8.x API).
# NOTE(review): ConfigParser/os are used below but their imports sit on
# elided lines near the top of the full file — confirm there.
from buildbot import locks
# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# Read master-wide settings; path overridable via $BUILDMASTER_CONFIG.
ini = ConfigParser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# on its own.
c['buildbotURL'] = ini.get("phase1", "buildbot_url")
# The 'slaves' list defines the set of recognized buildslaves. Each element is
# a BuildSlave object, specifying a unique slave name and password. The same
# slave name and password must be configured on the slave.
from buildbot.buildslave import BuildSlave

# Listening port for slave connections, overridable via [phase1] port.
# NOTE(review): the default value of slave_port (and the initialization of
# c['slaves'] / NetLocks) is assigned on elided lines just above this.
if ini.has_option("phase1", "port"):
	slave_port = ini.getint("phase1", "port")
# Register every "[slave ...]" ini section that belongs to phase 1 (no
# "phase" option, or phase == 1) as a BuildSlave.  Per-slave properties:
#   max_builds  - concurrent build limit (default 1)
#   do_cleanup  - whether periodic cleanup steps run on this slave
#   dl_lock / ul_lock - names of master-side network locks, created on
#                       demand in NetLocks
#   shared_wd   - slave uses a shared work directory (forces max_builds == 1)
for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
			sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			# Explicit default so max_builds is always bound below even
			# when the section has no "builds" option.
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
				sl_props['max_builds'] = max_builds
			if ini.has_option(section, "cleanup"):
				sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
			if ini.has_option(section, "dl_lock"):
				lockname = ini.get(section, "dl_lock")
				sl_props['dl_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "ul_lock"):
				# BUGFIX: previously read the "dl_lock" option here, so a
				# slave configured with only ul_lock registered the wrong
				# (or an empty) lock name.
				lockname = ini.get(section, "ul_lock")
				sl_props['ul_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "shared_wd"):
				shared_wd = ini.getboolean(section, "shared_wd")
				sl_props['shared_wd'] = shared_wd
				# A shared workdir cannot host parallel builds.
				if shared_wd and (max_builds != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option).
c['slavePortnum'] = slave_port

# Coalesce queued compatible build requests into a single build.
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
# Paths on the master: working area and helper scripts checkout.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# Optional tunables. NOTE(review): the fallback defaults for tree_expire,
# other_builds, cc_version, git_ssh, git_ssh_key, config_seed, usign_key,
# rsync_src_url/key are assigned on elided lines — verify in the full file.
if ini.has_option("phase1", "expire"):
	tree_expire = ini.getint("phase1", "expire")

if ini.has_option("phase1", "other_builds"):
	other_builds = ini.getint("phase1", "other_builds")

# cc_version may be "<ver>" (meaning exactly that version) or "<op> <ver>".
if ini.has_option("phase1", "cc_version"):
	cc_version = ini.get("phase1", "cc_version").split()
	if len(cc_version) == 1:
		cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")

if ini.has_option("phase1", "config_seed"):
	config_seed = ini.get("phase1", "config_seed")

# Source repository to build; branch defaults to master.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

# rsync destination for built binaries.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")
rsync_bin_defopts = ["-v", "-4", "--timeout=120"]

# --contimeout only applies to rsync-daemon transfers ("host::module" or rsync://).
if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
	rsync_bin_defopts += ["--contimeout=20"]

rsync_src_defopts = ["-v", "-4", "--timeout=120"]

# Optional rsync destination for source tarballs (dl/).
if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

	if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
		rsync_src_defopts += ["--contimeout=20"]

# usign package-signing key; default comment derived from the branch name.
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")

enable_kmod_archive = True
# Maintain a master-side clone of the source tree and ask dumpinfo.pl for
# the list of buildable targets ("target/subtarget" per line).
# NOTE(review): subprocess import and the surrounding read loop
# (targets = [], while ...) sit on elided lines.
if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Body of the elided read loop: first whitespace-separated token of each
# dumpinfo.pl output line is the target name.
line = findtargets.stdout.readline()
ta = line.strip().split(' ')
targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes; here we poll the configured source repository.
from buildbot.changes.gitpoller import GitPoller
c['change_source'] = []
# NOTE(review): the GitPoller() call is truncated here — repourl and
# pollinterval arguments are on elided lines.
c['change_source'].append(GitPoller(
	workdir=work_dir+'/work.git', branch=repo_branch,
# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.changes import filter

# NOTE(review): c['schedulers'] initialization and the name=/treeStableTimer
# arguments of both schedulers are on elided lines.
c['schedulers'].append(SingleBranchScheduler(
	change_filter=filter.ChangeFilter(branch=repo_branch),
	builderNames=targets))

c['schedulers'].append(ForceScheduler(
	builderNames=targets))
220 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
221 # what steps, and which slaves can execute them. Note that any particular build will
222 # only take place on one slave.
224 from buildbot.process.factory import BuildFactory
225 from buildbot.steps.source.git import Git
226 from buildbot.steps.shell import ShellCommand
227 from buildbot.steps.shell import SetPropertyFromCommand
228 from buildbot.steps.transfer import FileUpload
229 from buildbot.steps.transfer import FileDownload
230 from buildbot.steps.transfer import StringDownload
231 from buildbot.steps.master import MasterShellCommand
232 from buildbot.process.properties import Interpolate
233 from buildbot.process import properties
237 [ "tools", "tools/clean" ],
238 [ "chain", "toolchain/clean" ],
239 [ "linux", "target/linux/clean" ],
240 [ "dir", "dirclean" ],
241 [ "dist", "distclean" ]
244 def IsMakeCleanRequested(pattern):
245 def CheckCleanProperty(step):
246 val = step.getProperty("clean")
247 if val and re.match(pattern, val):
252 return CheckCleanProperty
def IsSharedWorkdir(step):
	"""True when the slave advertises a shared work directory
	(truthy 'shared_wd' build property)."""
	prop = step.getProperty("shared_wd")
	if prop:
		return True
	return False
def IsCleanupRequested(step):
	"""doStepIf predicate: should the cleanup.sh steps run on this slave?
	NOTE(review): the return statements are on elided lines; shared-workdir
	slaves appear to be handled specially first."""
	if IsSharedWorkdir(step):
	do_cleanup = step.getProperty("do_cleanup")

def IsExpireRequested(step):
	"""doStepIf predicate for the tree-expiry steps; mutually exclusive
	with cleanup (see final return). Elided lines handle shared workdirs."""
	if IsSharedWorkdir(step):
	return not IsCleanupRequested(step)

def IsGitFreshRequested(step):
	"""doStepIf predicate selecting the 'fresh' Git checkout variant,
	driven by the 'do_cleanup' property (return lines elided)."""
	do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
	"""Inverse of IsGitFreshRequested: pick the plain 'clean' checkout."""
	if IsGitFreshRequested(step):
		return False
	return True
def IsTaggingRequested(step):
	"""doStepIf predicate: true when the 'tag' property looks like a
	release version (X.Y.Z with optional -rcN). Return lines elided."""
	val = step.getProperty("tag")
	if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
def IsNoTaggingRequested(step):
	"""Negation of IsTaggingRequested, for steps that only run on
	non-release (untagged) builds."""
	tagging = IsTaggingRequested(step)
	return not tagging
def IsNoMasterBuild(step):
	"""True when we are building a release branch rather than master."""
	return not (repo_branch == "master")
def GetBaseVersion():
	"""Extract the numeric base version from a 'name-X.Y' release branch.
	NOTE(review): the fallback return (presumably repo_branch itself) is
	on elided lines."""
	if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
		return repo_branch.split('-')[1]

# NOTE(review): the @properties.renderer decorators for the helpers below
# appear to be on elided lines — confirm in the full file.
def GetVersionPrefix(props):
	"""Renderer: upload path prefix — 'TAG/' for tagged release builds,
	'X.Y-SNAPSHOT/' on release branches (else-branch elided)."""
	basever = GetBaseVersion()
	if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
		return "%s/" % props["tag"]
	elif basever != "master":
		return "%s-SNAPSHOT/" % basever

def GetNumJobs(props):
	"""Renderer: parallel make jobs = nproc / (max_builds + other_builds)
	(Python 2 integer division). Fallback branch elided."""
	if props.hasProperty("max_builds") and props.hasProperty("nproc"):
		return str(int(props["nproc"]) / (props["max_builds"] + other_builds))

	# Body of the elided GetCC renderer: detected gcc command.
	if props.hasProperty("cc_command"):
		return props["cc_command"]

	# Body of the elided GetCXX renderer: detected g++ command.
	if props.hasProperty("cxx_command"):
		return props["cxx_command"]

	# Body of the elided GetCwd renderer: slave-side working directory.
	if props.hasProperty("builddir"):
		return props["builddir"]
	elif props.hasProperty("workdir"):
		return props["workdir"]

def GetCCache(props):
	"""Renderer: the ccache command when one was detected, else the
	elided fallback (presumably empty)."""
	if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
		return props["ccache_command"]
def GetNextBuild(builder, requests):
	"""Build-request picker: prefer tagged (release) requests.
	NOTE(review): the surrounding 'for r in requests:' loop and the return
	statements are on elided lines."""
	if r.properties and r.properties.hasProperty("tag"):

def MakeEnv(overrides=None, tryccache=False):
	"""Assemble the build environment: raw compiler commands in CCC/CCXX,
	and (when tryccache) CC/CXX pointing at the ccache wrapper scripts.
	NOTE(review): the env dict opener, the if/else around the CC/CXX
	assignments and the final 'return env' are elided."""
		'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
		'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
		env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
		env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
		env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
		env['CC'] = env['CCC']
		env['CXX'] = env['CCXX']
	if overrides is not None:
		env.update(overrides)
def NetLockDl(props):
	"""Renderer: exclusive access to this slave's download lock, if any
	(empty-list fallback elided)."""
	if props.hasProperty("dl_lock"):
		lock = NetLocks[props["dl_lock"]]
		return [lock.access('exclusive')]

def NetLockUl(props):
	"""Renderer: exclusive access to this slave's upload lock, if any
	(empty-list fallback elided)."""
	if props.hasProperty("ul_lock"):
		lock = NetLocks[props["ul_lock"]]
		return [lock.access('exclusive')]

def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public key blob from a base64 secret key: keep the
	algorithm bytes [0:2], key id [32:40] and public part [72:], and swap
	'secret key' for 'public key' in the comment.
	NOTE(review): error handling around b64decode is on elided lines;
	base64 is imported elsewhere in the file."""
	seckey = base64.b64decode(seckey)
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
# Per-slave lock serializing dl/ population across builders on one slave.
dlLock = locks.SlaveLock("slave_dl")

# Shell helper injected into commands: decides whether any of the files in
# $CHANGED_FILES affect a package that is enabled (=y) in .config.
# NOTE(review): several lines of the shell text (function header, closing
# quotes, esac/done terminators) are elided, so the literal below is
# incomplete in this view. Whitespace inside it is collapsed by the re.sub.
checkBuiltin = re.sub('[\t\n ]+', ' ', """
	local symbol op path file;
	for file in $CHANGED_FILES; do
	while read symbol op path; do
		case "$symbol" in package-*)
			symbol="${symbol##*(}";
			symbol="${symbol%)}";
			for file in $CHANGED_FILES; do
				case "$file" in "package/$path/"*)
					grep -qsx "$symbol=y" .config && return 0
	done < tmp/.packagedeps;
class IfBuiltinShellCommand(ShellCommand):
	"""ShellCommand variant that only runs its payload when checkBuiltin
	says a changed file affects an enabled package; also exports the list
	of changed files as $CHANGED_FILES. (Python 2: uses `unicode`.)"""

	def _quote(self, str):
		# Single-quote any argument containing shell metacharacters.
		# NOTE(review): the passthrough return for safe strings is elided.
		if re.search("[^a-zA-Z0-9/_.-]", str):
			return "'%s'" %(re.sub("'", "'\"'\"'", str))

	def setCommand(self, command):
		# List commands are flattened to a quoted string, then wrapped so
		# the payload is skipped (exit 0) unless checkBuiltin succeeds.
		# NOTE(review): the enclosing ShellCommand.setCommand(...) call is
		# on elided lines.
		if not isinstance(command, (str, unicode)):
			command = ' '.join(map(self._quote, command))
		'%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)

	def setupEnvironment(self, cmd):
		# Collect the union of files touched by all changes feeding this
		# build and expose them via CHANGED_FILES.
		# NOTE(review): changedFiles = {} initialization is elided.
		slaveEnv = self.slaveEnvironment
		for request in self.build.requests:
			for source in request.sources:
				for change in source.changes:
					for file in change.files:
						changedFiles[file] = True
		fullSlaveEnv = slaveEnv.copy()
		fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
		cmd.args['env'] = fullSlaveEnv
# All phase-1 slaves may build any target.
# NOTE(review): slaveNames = [] initialization is on an elided line.
for slave in c['slaves']:
	slaveNames.append(slave.slavename)
# One builder per "target/subtarget". Many addStep() calls in this loop are
# truncated in this view (name=/command= lines elided); comments note this
# where it matters.
for target in targets:
	ts = target.split('/')

	factory = BuildFactory()

	# setup shared work directory if required
	factory.addStep(ShellCommand(
		description = "Setting up shared work directory",
		command = 'test -L "$PWD" || (mkdir -p "../shared-workdir" && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
		haltOnFailure = True,
		doStepIf = IsSharedWorkdir))

	# find number of cores (stored in the elided property= argument,
	# presumably "nproc" — consumed by GetNumJobs)
	factory.addStep(SetPropertyFromCommand(
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# find gcc and g++ compilers
	factory.addStep(FileDownload(
		name = "dlfindbinpl",
		mastersrc = scripts_dir + '/findbin.pl',
		slavedest = "../findbin.pl",

	# findbin.pl gets an optional version constraint ("eq 7.3" style).
	factory.addStep(SetPropertyFromCommand(
		property = "cc_command",
		description = "Finding gcc command",
			"../findbin.pl", "gcc",
			cc_version[0] if cc_version is not None else '',
			cc_version[1] if cc_version is not None else ''
		haltOnFailure = True))

	factory.addStep(SetPropertyFromCommand(
		property = "cxx_command",
		description = "Finding g++ command",
			"../findbin.pl", "g++",
			cc_version[0] if cc_version is not None else '',
			cc_version[1] if cc_version is not None else ''
		haltOnFailure = True))

	# see if ccache is available; failure is tolerated on purpose
	factory.addStep(SetPropertyFromCommand(
		property = "ccache_command",
		command = ["which", "ccache"],
		description = "Testing for ccache command",
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,
	# expire tree if needed
	factory.addStep(FileDownload(
		doStepIf = IsExpireRequested,
		mastersrc = scripts_dir + '/expire.sh',
		slavedest = "../expire.sh",

	factory.addStep(ShellCommand(
		description = "Checking for build tree expiry",
		command = ["./expire.sh", str(tree_expire)],
		haltOnFailure = True,
		doStepIf = IsExpireRequested,

	# cleanup.sh if needed
	factory.addStep(FileDownload(
		name = "dlcleanupsh",
		mastersrc = scripts_dir + '/cleanup-phase1.sh',
		slavedest = "../cleanup.sh",
		doStepIf = IsCleanupRequested))

	# "full" mode: clean everything belonging to this slave
	factory.addStep(ShellCommand(
		description = "Cleaning previous builds",
		command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
		haltOnFailure = True,
		doStepIf = IsCleanupRequested,

	# "single" mode: clean only this builder's work area
	factory.addStep(ShellCommand(
		description = "Cleaning work area",
		command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
		haltOnFailure = True,
		doStepIf = IsCleanupRequested,

	# user-requested clean targets (driven by the 'clean' force property)
	for tuple in CleanTargetMap:
		factory.addStep(ShellCommand(
			description = 'User-requested "make %s"' % tuple[1],
			command = ["make", tuple[1], "V=s"],
			doStepIf = IsMakeCleanRequested(tuple[0])
	# Workaround bug when switching from a checked out tag back to a branch
	# Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
	factory.addStep(ShellCommand(
		name = "gitcheckout",
		description = "Ensure that Git HEAD is sane",
		command = "if [ -d .git ]; then git checkout -f %s; git branch --set-upstream-to origin/%s; else exit 0; fi" %(repo_branch, repo_branch),
		haltOnFailure = True))

	# check out the source
	# if repo doesn't exist: 'git clone repourl'
	# method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
	# 'git fetch -t repourl branch; git reset --hard revision'
	# Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
	# if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
	# NOTE(review): the two Git(...) step openers (clean vs fresh variants)
	# are on elided lines; only their shared kwargs are visible below.
		branch = repo_branch,
		haltOnFailure = True,
		doStepIf = IsGitCleanRequested,
		branch = repo_branch,
		haltOnFailure = True,
		doStepIf = IsGitFreshRequested,

	factory.addStep(ShellCommand(
		description = "Fetching Git remote refs",
		command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],

	factory.addStep(ShellCommand(
		description = "Checking out Git tag",
		command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
		haltOnFailure = True,
		doStepIf = IsTaggingRequested

	# Verify that Git HEAD points to a tag or branch
	# Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
	factory.addStep(ShellCommand(
		description = "Ensure that Git HEAD is pointing to a branch or tag",
		command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
		haltOnFailure = True))
	factory.addStep(ShellCommand(
		description = "Remove tmp folder",
		command=["rm", "-rf", "tmp/"]))

#	factory.addStep(ShellCommand(
#		name = "feedsconf",
#		description = "Copy the feeds.conf",
#		command='''cp ~/feeds.conf ./feeds.conf''' ))

	factory.addStep(ShellCommand(
		name = "rmfeedlinks",
		description = "Remove feed symlinks",
		command=["rm", "-rf", "package/feeds/"]))

	# ccache wrapper scripts: forward to $CCC/$CCXX through $CCACHE
	# (set up by MakeEnv).
	factory.addStep(StringDownload(
		s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
		slavedest = "../ccache_cc.sh",

	factory.addStep(StringDownload(
		s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
		slavedest = "../ccache_cxx.sh",

	# Deploy the SSH deploy key for feed cloning (s= argument elided;
	# presumably git_ssh_key). Guarded by git_ssh in elided lines.
	factory.addStep(StringDownload(
		name = "dlgitclonekey",
		slavedest = "../git-clone.key",

	# Rewrite feed URLs to SSH so the deploy key is used.
	factory.addStep(ShellCommand(
		name = "patchfeedsconf",
		description = "Patching feeds.conf",
		command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		command=["./scripts/feeds", "update"],
		env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),

	factory.addStep(ShellCommand(
		name = "rmfeedsconf",
		description = "Removing feeds.conf",
		command=["rm", "feeds.conf"],

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		command=["./scripts/feeds", "install", "-a"],
		env = MakeEnv(tryccache=True),
	# seed config
	if config_seed is not None:
		factory.addStep(StringDownload(
			name = "dlconfigseed",
			s = config_seed + '\n',
			slavedest = ".config",

	# Append the target selection and signing switch to .config.
	factory.addStep(ShellCommand(
		description = "Seeding .config",
		command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')

	factory.addStep(ShellCommand(
		description = "Removing output directory",
		command = ["rm", "-rf", "bin/"]

	factory.addStep(ShellCommand(
		description = "Populating .config",
		command = ["make", "defconfig"],

	# Guard against defconfig silently dropping the requested target.
	factory.addStep(ShellCommand(
		description = "Checking architecture",
		command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],

	# find libc suffix ("" for musl, "-glibc" etc. otherwise); the
	# property= argument (presumably "libc") is elided.
	factory.addStep(SetPropertyFromCommand(
		description = "Finding libc suffix",
		command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))

	# install the public usign key; the private signing happens on the
	# master, so the slave only gets placeholder key material.
	if usign_key is not None:
		factory.addStep(StringDownload(
			name = "dlkeybuildpub",
			s = UsignSec2Pub(usign_key, usign_comment),
			slavedest = "key-build.pub",

		factory.addStep(StringDownload(
			s = "# fake private key",
			slavedest = "key-build",

		factory.addStep(StringDownload(
			name = "dlkeybuilducert",
			s = "# fake certificate",
			slavedest = "key-build.ucert",
	# Share one dl/ directory per slave via $HOME/dl.
	factory.addStep(ShellCommand(
		description = "Preparing dl/",
		command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",

	factory.addStep(ShellCommand(
		description = "Building and installing GNU tar",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
		env = MakeEnv(tryccache=True),

	# dl/ is shared between builders on a slave, hence the exclusive lock.
	factory.addStep(ShellCommand(
		description = "Populating dl/",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
		locks = [dlLock.access('exclusive')],

	factory.addStep(ShellCommand(
		description = "Cleaning base-files",
		command=["make", "package/base-files/clean", "V=s"]

	factory.addStep(ShellCommand(
		description = "Building and installing tools",
		command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
		env = MakeEnv(tryccache=True),

	factory.addStep(ShellCommand(
		description = "Building and installing toolchain",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],

	factory.addStep(ShellCommand(
		description = "Building kmods",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},

	# find kernel version
	factory.addStep(SetPropertyFromCommand(
		name = "kernelversion",
		property = "kernelversion",
		description = "Finding the effective Kernel version",
		command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
		env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }

	factory.addStep(ShellCommand(
		description = "Cleaning up package build",
		command=["make", "package/cleanup", "V=s"]

	factory.addStep(ShellCommand(
		description = "Building packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
		#env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},

	# factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		description = "Installing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],

	# CONFIG_SIGNED_PACKAGES= disables slave-side signing; the master
	# signs later in the signall step.
	factory.addStep(ShellCommand(
		description = "Indexing packages",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
	if enable_kmod_archive:
		# embed kmod repository. Must happen before 'images'

		# find rootfs staging directory
		factory.addStep(SetPropertyFromCommand(
			property = "stageroot",
			description = "Finding the rootfs staging directory",
			command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
			env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }

		factory.addStep(ShellCommand(
			description = "Creating file overlay directory",
			command=["mkdir", "-p", "files/etc/opkg"],

		# Duplicate the core feed line as a _kmods feed pointing at the
		# per-kernel kmods/ subdirectory.
		factory.addStep(ShellCommand(
			description = "Embedding kmod repository configuration",
			command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
				"%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),

	#factory.addStep(IfBuiltinShellCommand(
	factory.addStep(ShellCommand(
		description = "Building and installing images",
		command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],

	factory.addStep(ShellCommand(
		description = "Generating config.seed",
		command=["make", "-j1", "diffconfig", "V=s"],

	factory.addStep(ShellCommand(
		description = "Calculating checksums",
		command=["make", "-j1", "checksum", "V=s"],

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			description = "Creating kmod directory",
			command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],

		factory.addStep(ShellCommand(
			name = "kmodprepare",
			description = "Preparing kmod archive",
			command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
				Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],

		factory.addStep(ShellCommand(
			description = "Indexing kmod archive",
			command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
				Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
961 if ini.has_option("gpg", "key") or usign_key is not None:
962 factory.addStep(MasterShellCommand(
963 name = "signprepare",
964 description = "Preparing temporary signing directory",
965 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
969 factory.addStep(ShellCommand(
971 description = "Packing files to sign",
972 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
976 factory.addStep(FileUpload(
977 slavesrc = "sign.tar.gz",
978 masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
982 factory.addStep(MasterShellCommand(
984 description = "Signing files",
985 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
986 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
990 factory.addStep(FileDownload(
991 name = "dlsigntargz",
992 mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
993 slavedest = "sign.tar.gz",
997 factory.addStep(ShellCommand(
999 description = "Unpacking signed files",
1000 command = ["tar", "-xzf", "sign.tar.gz"],
1001 haltOnFailure = True
1005 factory.addStep(ShellCommand(
1006 name = "dirprepare",
1007 description = "Preparing upload directory structure",
1008 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1009 haltOnFailure = True
1012 factory.addStep(ShellCommand(
1013 name = "linkprepare",
1014 description = "Preparing repository symlink",
1015 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
1016 doStepIf = IsNoMasterBuild,
1017 haltOnFailure = True
1020 if enable_kmod_archive:
1021 factory.addStep(ShellCommand(
1022 name = "kmoddirprepare",
1023 description = "Preparing kmod archive upload directory",
1024 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1025 haltOnFailure = True
1028 factory.addStep(ShellCommand(
1030 description = "Uploading directory structure",
1031 command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
1032 env={'RSYNC_PASSWORD': rsync_bin_key},
1033 haltOnFailure = True,
	# download remote sha256sums to 'target-sha256sums'; best-effort since
	# the remote file may not exist yet on a first upload
	factory.addStep(ShellCommand(
		name = "target-sha256sums",
		description = "Fetching remote sha256sums for target",
		command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,

	# build list of files to upload
	factory.addStep(FileDownload(
		name = "dlsha2rsyncpl",
		mastersrc = scripts_dir + '/sha2rsync.pl',
		slavedest = "../sha2rsync.pl",

	# sha2rsync.pl diffs local vs remote sha256sums into "rsynclist".
	factory.addStep(ShellCommand(
		description = "Building list of files to upload",
		command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
		haltOnFailure = True,

	factory.addStep(FileDownload(
		name = "dlrsync.sh",
		mastersrc = scripts_dir + '/rsync.sh',
		slavedest = "../rsync.sh",

	# upload new files and update existing ones
	factory.addStep(ShellCommand(
		name = "targetupload",
		description = "Uploading target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,

	# delete files which don't exist locally
	factory.addStep(ShellCommand(
		name = "targetprune",
		description = "Pruning target files",
		command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
			["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
			Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,

	if enable_kmod_archive:
		factory.addStep(ShellCommand(
			name = "kmodupload",
			description = "Uploading kmod archive",
			command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
				["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
				Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
			env={'RSYNC_PASSWORD': rsync_bin_key},
			haltOnFailure = True,
	if rsync_src_url is not None:
		# Only tarballs actually used by this build (newer than .config).
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
			haltOnFailure = True

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
				[Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = True,

	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,

	# logs (best-effort)
	factory.addStep(ShellCommand(
		description = "Uploading logs",
		command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,

	factory.addStep(ShellCommand(
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,

	factory.addStep(ShellCommand(
		name = "ccachestat",
		description = "Reporting ccache stats",
		command=["ccache", "-s"],
		env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
		want_stderr = False,
		haltOnFailure = False,
		flunkOnFailure = False,
		warnOnFailure = False,

	from buildbot.config import BuilderConfig

	# Register one builder per target; all slaves are eligible and tagged
	# requests are prioritized by GetNextBuild.
	c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

from buildbot.status import html
from buildbot.status.web import authz, auth

# NOTE(review): c['status'] = [] initialization, the Authz() closing paren
# and the else: for the unauthenticated WebStatus are on elided lines.
if ini.has_option("phase1", "status_bind"):
	if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
		authz_cfg=authz.Authz(
			# change any of these to True to enable; see the manual for more
			# options
			auth=auth.BasicAuth([(ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))]),
			gracefulShutdown = 'auth',
			forceBuild = 'auth', # use this to test your slave once it is set up
			forceAllBuilds = 'auth',
			pingBuilder = False,
			stopAllBuilds = 'auth',
			cancelPendingBuild = 'auth',
		c['status'].append(html.WebStatus(http_port=ini.get("phase1", "status_bind"), authz=authz_cfg))
		# NOTE(review): this line sits under an elided 'else:' branch
		# (web status without authentication).
		c['status'].append(html.WebStatus(http_port=ini.get("phase1", "status_bind")))
from buildbot.status import words

# Optional IRC notifier. NOTE(review): defaults for irc_port/irc_pass and
# the notify_events dict opener/closer are on elided lines.
if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
	irc_host = ini.get("irc", "host")
	irc_chan = ini.get("irc", "channel")
	irc_nick = ini.get("irc", "nickname")

	if ini.has_option("irc", "port"):
		irc_port = ini.getint("irc", "port")

	if ini.has_option("irc", "password"):
		irc_pass = ini.get("irc", "password")

	irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
		channels = [{ "channel": irc_chan }],
			# only announce status transitions
			'successToFailure': 1,
			'failureToSuccess': 1

	c['status'].append(irc)
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	# NOTE(review): this entry belongs to an elided "c['db'] = {" literal.
	'db_url' : "sqlite:///state.sqlite",