2 # ex: set syntax=python:
9 from buildbot import locks
11 # This is a sample buildmaster config file. It must be installed as
12 # 'master.cfg' in your buildmaster's base directory.
# All site-specific settings live in config.ini next to this master.cfg;
# everything below is driven by that file.
ini = ConfigParser.ConfigParser()
ini.read("./config.ini")

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
21 ####### PROJECT IDENTITY
23 # the 'title' string will appear at the top of this buildbot
24 # installation's html.WebStatus home page (linked to the
25 # 'titleURL') and is embedded in the title of the waterfall HTML page.
27 c['title'] = ini.get("general", "title")
28 c['titleURL'] = ini.get("general", "title_url")
30 # the 'buildbotURL' string should point to the location where the buildbot's
31 # internal web server (usually the html.WebStatus page) is visible. This
32 # typically uses the port number set in the Waterfall 'status' entry, but
33 # with an externally-visible host name which the buildbot cannot figure out
36 c['buildbotURL'] = ini.get("general", "buildbot_url")
40 # The 'slaves' list defines the set of recognized buildslaves. Each element is
41 # a BuildSlave object, specifying a unique slave name and password. The same
42 # slave name and password must be configured on the slave.
43 from buildbot.buildslave import BuildSlave
# TCP port the master listens on for slave connections. Default to the
# conventional buildbot port so that c['slavePortnum'] (set below) is
# always defined even when the [general] section has no "port" option.
slave_port = 9989
if ini.has_option("general", "port"):
	slave_port = ini.getint("general", "port")
# Parse every "slave <id>" section of config.ini and register one
# BuildSlave per entry. "name" and "password" are mandatory; the other
# options tune per-slave behaviour and are exported as build properties
# so doStepIf predicates and renderers can consult them.
for section in ini.sections():
	if section.startswith("slave "):
		if ini.has_option(section, "name") and ini.has_option(section, "password"):
			sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			# Reset on every iteration so a "builds" value from a previous
			# section can never leak into a slave that does not set it.
			max_builds = 1
			if ini.has_option(section, "builds"):
				max_builds = ini.getint(section, "builds")
				sl_props['max_builds'] = max_builds
			if ini.has_option(section, "cleanup"):
				sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
			if ini.has_option(section, "dl_lock"):
				lockname = ini.get(section, "dl_lock")
				sl_props['dl_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "ul_lock"):
				# BUGFIX: read the "ul_lock" option here. The previous
				# copy-pasted read of "dl_lock" stored the wrong lock name
				# in 'ul_lock' and raised NoOptionError for slaves that
				# configure an upload lock without a download lock.
				lockname = ini.get(section, "ul_lock")
				sl_props['ul_lock'] = lockname
				if lockname not in NetLocks:
					NetLocks[lockname] = locks.MasterLock(lockname)
			if ini.has_option(section, "shared_wd"):
				shared_wd = ini.getboolean(section, "shared_wd")
				sl_props['shared_wd'] = shared_wd
				# A shared working directory cannot host concurrent builds.
				if shared_wd and (max_builds != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')
			c['slaves'].append(BuildSlave(name, password, max_builds = max_builds, properties = sl_props))
# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
# This must match the value configured into the buildslaves (with their
# --master option).
c['slavePortnum'] = slave_port

# Collapse compatible pending build requests into a single build.
c['mergeRequests'] = True

# Reduce amount of backlog data
c['buildHorizon'] = 30
# Paths used for the master-side git mirror and helper scripts.
home_dir = os.path.abspath(ini.get("general", "homedir"))
scripts_dir = os.path.abspath("../scripts")

# Defaults for the optional [general] knobs. These names are consumed
# unconditionally further down (tree_expire by the expire steps,
# cc_version by the compiler-detection steps, git_ssh/git_ssh_key by the
# feed update step), so they must exist even when the option is absent.
tree_expire = 0
other_builds = 0
cc_version = None
git_ssh = False
git_ssh_key = None

if ini.has_option("general", "expire"):
	tree_expire = ini.getint("general", "expire")

if ini.has_option("general", "other_builds"):
	other_builds = ini.getint("general", "other_builds")

if ini.has_option("general", "cc_version"):
	# Either "<op> <version>" (e.g. "ge 4.8") or a bare version, which is
	# normalized to an exact-match requirement.
	cc_version = ini.get("general", "cc_version").split()
	if len(cc_version) == 1:
		cc_version = ["eq", cc_version[0]]

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")
# [repo] section: which repository and branch this master builds.
repo_url = ini.get("repo", "url")
repo_branch = ini.get("repo", "branch") if ini.has_option("repo", "branch") else "master"
# [rsync] section: where binaries (mandatory) and sources (optional) get
# uploaded. The source upload steps are guarded later by
# "rsync_src_url is not None", so default both source values explicitly.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

# Options shared by every rsync invocation: IPv4 only, verbose, and
# bounded I/O/connect timeouts so a dead mirror cannot hang a build.
rsync_defopts = ["-4", "-v", "--timeout=120", "--contimeout=20"]
# [gpg] section: signing configuration. The signing steps are guarded
# later by "gpg_keyid is not None", so gpg_keyid must default to None
# here or an unsigned setup would raise NameError.
gpg_home = "~/.gnupg"
gpg_keyid = None
gpg_comment = "Unattended build signature"
gpg_passfile = "/dev/null"

if ini.has_option("gpg", "home"):
	gpg_home = ini.get("gpg", "home")

if ini.has_option("gpg", "keyid"):
	gpg_keyid = ini.get("gpg", "keyid")

if ini.has_option("gpg", "comment"):
	gpg_comment = ini.get("gpg", "comment")

if ini.has_option("gpg", "passfile"):
	gpg_passfile = ini.get("gpg", "passfile")
162 enable_kmod_archive = True
168 if not os.path.isdir(home_dir+'/source.git'):
169 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
171 subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')
173 findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
174 stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
177 line = findtargets.stdout.readline()
180 ta = line.strip().split(' ')
181 targets.append(ta[0])
# the 'change_source' setting tells the buildmaster how it should find out
# about source code changes. Here we poll the Git repository configured in
# config.ini.
187 from buildbot.changes.gitpoller import GitPoller
188 c['change_source'] = []
189 c['change_source'].append(GitPoller(
191 workdir=home_dir+'/work.git', branch=repo_branch,
196 # Configure the Schedulers, which decide how to react to incoming changes. In this
197 # case, just kick off a 'basebuild' build
199 from buildbot.schedulers.basic import SingleBranchScheduler
200 from buildbot.schedulers.forcesched import ForceScheduler
201 from buildbot.changes import filter
203 c['schedulers'].append(SingleBranchScheduler(
205 change_filter=filter.ChangeFilter(branch=repo_branch),
207 builderNames=targets))
209 c['schedulers'].append(ForceScheduler(
211 builderNames=targets))
215 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
216 # what steps, and which slaves can execute them. Note that any particular build will
217 # only take place on one slave.
219 from buildbot.process.factory import BuildFactory
220 from buildbot.steps.source.git import Git
221 from buildbot.steps.shell import ShellCommand
222 from buildbot.steps.shell import SetPropertyFromCommand
223 from buildbot.steps.transfer import FileUpload
224 from buildbot.steps.transfer import FileDownload
225 from buildbot.steps.transfer import StringDownload
226 from buildbot.steps.master import MasterShellCommand
227 from buildbot.process.properties import Interpolate
228 from buildbot.process import properties
232 [ "tools", "tools/clean" ],
233 [ "chain", "toolchain/clean" ],
234 [ "linux", "target/linux/clean" ],
235 [ "dir", "dirclean" ],
236 [ "dist", "distclean" ]
def IsMakeCleanRequested(pattern):
	# Factory for doStepIf predicates: the returned callable checks the
	# build's "clean" property (set via the force-build form) against
	# *pattern*.
	def CheckCleanProperty(step):
		val = step.getProperty("clean")
		if val and re.match(pattern, val):
		# NOTE(review): branch bodies are elided in this excerpt —
		# presumably returns True on a match and False otherwise;
		# confirm against the full file.
	return CheckCleanProperty
def IsCleanupRequested(step):
	# doStepIf predicate for the cleanup.sh steps, driven by the per-slave
	# "shared_wd" and "do_cleanup" properties from config.ini.
	shared_wd = step.getProperty("shared_wd")
	# NOTE(review): lines elided here in this excerpt (likely an early
	# return depending on shared_wd) — confirm against the full file.
	do_cleanup = step.getProperty("do_cleanup")
def IsExpireRequested(step):
	# doStepIf predicate for the expire.sh steps. Reads the slave's
	# "shared_wd" property (its use is elided in this excerpt) and treats
	# expiry and cleanup as mutually exclusive.
	shared_wd = step.getProperty("shared_wd")
	# NOTE(review): intermediate lines elided in this excerpt — confirm
	# against the full file.
	return not IsCleanupRequested(step)
def IsGitFreshRequested(step):
	# doStepIf predicate: whether the "fresh" Git checkout variant should
	# run for this build. Presumably derived from the slave's "do_cleanup"
	# property — the rest of the body is elided in this excerpt; confirm.
	do_cleanup = step.getProperty("do_cleanup")
def IsGitCleanRequested(step):
	# Use the ordinary "clean" Git checkout whenever a "fresh" checkout
	# was not requested for this build.
	fresh = IsGitFreshRequested(step)
	return not fresh
def IsTaggingRequested(step):
	# doStepIf predicate: true when the forced build carries a "tag"
	# property shaped like a release version (e.g. "19.07.3" or
	# "19.07.3-rc1").
	val = step.getProperty("tag")
	if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
	# NOTE(review): branch bodies are elided in this excerpt — presumably
	# True/False returns; confirm against the full file. (The pattern
	# would ideally be a raw string.)
def IsNoTaggingRequested(step):
	# Inverse of IsTaggingRequested: true for ordinary, untagged builds.
	tagging = IsTaggingRequested(step)
	return not tagging
def IsNoMasterBuild(step):
	# True whenever this master builds a release branch rather than master.
	return not (repo_branch == "master")
def GetBaseVersion():
	# Derive the numeric base version from a "name-XX.YY" release branch
	# (e.g. "openwrt-19.07" -> "19.07").
	if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
		return repo_branch.split('-')[1]
	# NOTE(review): fallthrough elided in this excerpt — presumably
	# returns "master", which GetVersionPrefix compares against; confirm.
def GetVersionPrefix(props):
	# Upload-directory prefix: "X.Y.Z/" for tagged release builds,
	# "XX.YY-SNAPSHOT/" for release-branch builds.
	basever = GetBaseVersion()
	if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
		return "%s/" % props["tag"]
	elif basever != "master":
		return "%s-SNAPSHOT/" % basever
	# NOTE(review): final fallthrough elided in this excerpt — master
	# builds presumably get an empty prefix; confirm against the full file.
def GetNumJobs(props):
	# Parallel make jobs for this build: the slave's CPU count divided
	# across its concurrent builds plus any configured "other_builds".
	if props.hasProperty("max_builds") and props.hasProperty("nproc"):
		# NOTE(review): "/" is truncating integer division under Python 2
		# (this config targets Python 2: ConfigParser, `unicode`, 0600
		# literals); it would need "//" if ever ported to Python 3.
		return str(int(props["nproc"]) / (props["max_builds"] + other_builds))
	# NOTE(review): fallback elided in this excerpt — presumably "1".
314 if props.hasProperty("cc_command"):
315 return props["cc_command"]
321 if props.hasProperty("cxx_command"):
322 return props["cxx_command"]
328 if props.hasProperty("builddir"):
329 return props["builddir"]
330 elif props.hasProperty("workdir"):
331 return props["workdir"]
def GetCCache(props):
	# Path of the ccache binary detected by the earlier "which ccache"
	# step, if any.
	if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
		return props["ccache_command"]
	# NOTE(review): fallback elided in this excerpt — confirm against the
	# full file.
342 def GetNextBuild(builder, requests):
344 if r.properties and r.properties.hasProperty("tag"):
348 def MakeEnv(overrides=None, tryccache=False):
350 'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
351 'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
354 env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
355 env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
356 env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
358 env['CC'] = env['CCC']
359 env['CXX'] = env['CCXX']
361 if overrides is not None:
362 env.update(overrides)
def NetLockDl(props):
	# Renderer for step locks: the master-wide download lock configured
	# for this slave via "dl_lock" in config.ini, held exclusively.
	# NOTE(review): one line elided before and after the lookup in this
	# excerpt (presumably a None guard / empty-list fallback) — confirm.
	if props.hasProperty("dl_lock"):
		lock = NetLocks[props["dl_lock"]]
		return [lock.access('exclusive')]
def NetLockUl(props):
	# Renderer for step locks: the master-wide upload lock configured for
	# this slave via "ul_lock" in config.ini, held exclusively.
	# NOTE(review): one line elided before and after the lookup in this
	# excerpt (presumably a None guard / empty-list fallback) — confirm.
	if props.hasProperty("ul_lock"):
		lock = NetLocks[props["ul_lock"]]
		return [lock.access('exclusive')]
387 dlLock = locks.SlaveLock("slave_dl")
389 checkBuiltin = re.sub('[\t\n ]+', ' ', """
391 local symbol op path file;
392 for file in $CHANGED_FILES; do
398 while read symbol op path; do
399 case "$symbol" in package-*)
400 symbol="${symbol##*(}";
401 symbol="${symbol%)}";
402 for file in $CHANGED_FILES; do
403 case "$file" in "package/$path/"*)
404 grep -qsx "$symbol=y" .config && return 0
408 done < tmp/.packagedeps;
414 class IfBuiltinShellCommand(ShellCommand):
415 def _quote(self, str):
416 if re.search("[^a-zA-Z0-9/_.-]", str):
417 return "'%s'" %(re.sub("'", "'\"'\"'", str))
420 def setCommand(self, command):
421 if not isinstance(command, (str, unicode)):
422 command = ' '.join(map(self._quote, command))
425 '%s; if checkBuiltin; then %s; else exit 0; fi' %(checkBuiltin, command)
428 def setupEnvironment(self, cmd):
429 slaveEnv = self.slaveEnvironment
433 for request in self.build.requests:
434 for source in request.sources:
435 for change in source.changes:
436 for file in change.files:
437 changedFiles[file] = True
438 fullSlaveEnv = slaveEnv.copy()
439 fullSlaveEnv['CHANGED_FILES'] = ' '.join(changedFiles.keys())
440 cmd.args['env'] = fullSlaveEnv
444 for slave in c['slaves']:
445 slaveNames.append(slave.slavename)
447 for target in targets:
448 ts = target.split('/')
450 factory = BuildFactory()
452 # find number of cores
453 factory.addStep(SetPropertyFromCommand(
456 description = "Finding number of CPUs",
457 command = ["nproc"]))
459 # find gcc and g++ compilers
460 if cc_version is not None:
461 factory.addStep(FileDownload(
462 name = "dlfindbinpl",
463 mastersrc = scripts_dir + '/findbin.pl',
464 slavedest = "../findbin.pl",
467 factory.addStep(SetPropertyFromCommand(
469 property = "cc_command",
470 description = "Finding gcc command",
471 command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
472 haltOnFailure = True))
474 factory.addStep(SetPropertyFromCommand(
476 property = "cxx_command",
477 description = "Finding g++ command",
478 command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
479 haltOnFailure = True))
481 # see if ccache is available
482 factory.addStep(SetPropertyFromCommand(
483 property = "ccache_command",
484 command = ["which", "ccache"],
485 description = "Testing for ccache command",
486 haltOnFailure = False,
487 flunkOnFailure = False,
488 warnOnFailure = False,
491 # expire tree if needed
493 factory.addStep(FileDownload(
495 doStepIf = IsExpireRequested,
496 mastersrc = scripts_dir + '/expire.sh',
497 slavedest = "../expire.sh",
500 factory.addStep(ShellCommand(
502 description = "Checking for build tree expiry",
503 command = ["./expire.sh", str(tree_expire)],
505 haltOnFailure = True,
506 doStepIf = IsExpireRequested,
509 # cleanup.sh if needed
510 factory.addStep(FileDownload(
511 name = "dlcleanupsh",
512 mastersrc = "cleanup.sh",
513 slavedest = "../cleanup.sh",
515 doStepIf = IsCleanupRequested))
517 factory.addStep(ShellCommand(
519 description = "Cleaning previous builds",
520 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "full"],
522 haltOnFailure = True,
523 doStepIf = IsCleanupRequested,
526 factory.addStep(ShellCommand(
528 description = "Cleaning work area",
529 command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:slavename)s"), Interpolate("%(prop:buildername)s"), "single"],
531 haltOnFailure = True,
532 doStepIf = IsCleanupRequested,
535 # user-requested clean targets
536 for tuple in CleanTargetMap:
537 factory.addStep(ShellCommand(
539 description = 'User-requested "make %s"' % tuple[1],
540 command = ["make", tuple[1], "V=s"],
542 doStepIf = IsMakeCleanRequested(tuple[0])
545 # Workaround bug when switching from a checked out tag back to a branch
546 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
547 factory.addStep(ShellCommand(
548 name = "gitcheckout",
549 description = "Ensure that Git HEAD is sane",
550 command = "if [ -d .git ]; then git checkout master; else exit 0; fi",
551 haltOnFailure = True))
553 # check out the source
555 # if repo doesn't exist: 'git clone repourl'
556 # method 'clean' runs 'git clean -d -f', method fresh runs 'git clean -d -f x'. Only works with mode='full'
557 # 'git fetch -t repourl branch; git reset --hard revision'
558 # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
559 # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
563 branch = repo_branch,
566 haltOnFailure = True,
567 doStepIf = IsGitCleanRequested,
573 branch = repo_branch,
576 haltOnFailure = True,
577 doStepIf = IsGitFreshRequested,
581 factory.addStep(ShellCommand(
583 description = "Fetching Git remote refs",
584 command = ["git", "fetch", "origin", "+refs/heads/%s:refs/remotes/origin/%s" %(repo_branch, repo_branch)],
589 factory.addStep(ShellCommand(
591 description = "Checking out Git tag",
592 command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
593 haltOnFailure = True,
594 doStepIf = IsTaggingRequested
597 # Verify that Git HEAD points to a tag or branch
598 # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
599 factory.addStep(ShellCommand(
601 description = "Ensure that Git HEAD is pointing to a branch or tag",
602 command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
603 haltOnFailure = True))
605 factory.addStep(ShellCommand(
607 description = "Remove tmp folder",
608 command=["rm", "-rf", "tmp/"]))
611 # factory.addStep(ShellCommand(
612 # name = "feedsconf",
613 # description = "Copy the feeds.conf",
614 # command='''cp ~/feeds.conf ./feeds.conf''' ))
617 factory.addStep(ShellCommand(
618 name = "rmfeedlinks",
619 description = "Remove feed symlinks",
620 command=["rm", "-rf", "package/feeds/"]))
622 factory.addStep(StringDownload(
624 s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
625 slavedest = "../ccache_cc.sh",
629 factory.addStep(StringDownload(
631 s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
632 slavedest = "../ccache_cxx.sh",
638 factory.addStep(StringDownload(
639 name = "dlgitclonekey",
641 slavedest = "../git-clone.key",
645 factory.addStep(ShellCommand(
646 name = "patchfeedsconf",
647 description = "Patching feeds.conf",
648 command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
653 factory.addStep(ShellCommand(
654 name = "updatefeeds",
655 description = "Updating feeds",
656 command=["./scripts/feeds", "update"],
657 env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
663 factory.addStep(ShellCommand(
664 name = "rmfeedsconf",
665 description = "Removing feeds.conf",
666 command=["rm", "feeds.conf"],
671 factory.addStep(ShellCommand(
672 name = "installfeeds",
673 description = "Installing feeds",
674 command=["./scripts/feeds", "install", "-a"],
675 env = MakeEnv(tryccache=True),
680 factory.addStep(FileDownload(
681 name = "dlconfigseed",
682 mastersrc = "config.seed",
683 slavedest = ".config",
688 factory.addStep(ShellCommand(
690 description = "Seeding .config",
691 command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
694 factory.addStep(ShellCommand(
696 description = "Removing output directory",
697 command = ["rm", "-rf", "bin/"]
700 factory.addStep(ShellCommand(
702 description = "Populating .config",
703 command = ["make", "defconfig"],
708 factory.addStep(ShellCommand(
710 description = "Checking architecture",
711 command = ["grep", "-sq", "CONFIG_TARGET_%s=y" %(ts[0]), ".config"],
719 factory.addStep(SetPropertyFromCommand(
722 description = "Finding libc suffix",
723 command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
726 factory.addStep(FileDownload(name="dlkeybuild", mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
727 factory.addStep(FileDownload(name="dlkeybuildpub", mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))
730 factory.addStep(ShellCommand(
732 description = "Preparing dl/",
733 command = "mkdir -p $HOME/dl && rm -rf ./dl && ln -sf $HOME/dl ./dl",
739 factory.addStep(ShellCommand(
741 description = "Building and installing GNU tar",
742 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
743 env = MakeEnv(tryccache=True),
748 factory.addStep(ShellCommand(
750 description = "Populating dl/",
751 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
754 locks = [dlLock.access('exclusive')],
757 factory.addStep(ShellCommand(
759 description = "Cleaning base-files",
760 command=["make", "package/base-files/clean", "V=s"]
764 factory.addStep(ShellCommand(
766 description = "Building and installing tools",
767 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
768 env = MakeEnv(tryccache=True),
772 factory.addStep(ShellCommand(
774 description = "Building and installing toolchain",
775 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
780 factory.addStep(ShellCommand(
782 description = "Building kmods",
783 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
785 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
789 # find kernel version
790 factory.addStep(SetPropertyFromCommand(
791 name = "kernelversion",
792 property = "kernelversion",
793 description = "Finding the effective Kernel version",
794 command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
795 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
798 factory.addStep(ShellCommand(
800 description = "Cleaning up package build",
801 command=["make", "package/cleanup", "V=s"]
804 factory.addStep(ShellCommand(
806 description = "Building packages",
807 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
809 #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
813 # factory.addStep(IfBuiltinShellCommand(
814 factory.addStep(ShellCommand(
816 description = "Installing packages",
817 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
822 factory.addStep(ShellCommand(
824 description = "Indexing packages",
825 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s"],
830 if enable_kmod_archive:
831 # embed kmod repository. Must happen before 'images'
833 # find rootfs staging directory
834 factory.addStep(SetPropertyFromCommand(
836 property = "stageroot",
837 description = "Finding the rootfs staging directory",
838 command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
839 env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
842 factory.addStep(ShellCommand(
844 description = "Creating file overlay directory",
845 command=["mkdir", "-p", "files/etc/opkg"],
849 factory.addStep(ShellCommand(
851 description = "Embedding kmod repository configuration",
852 command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
853 "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
857 #factory.addStep(IfBuiltinShellCommand(
858 factory.addStep(ShellCommand(
860 description = "Building and installing images",
861 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
866 factory.addStep(ShellCommand(
868 description = "Generating config.seed",
869 command=["make", "-j1", "diffconfig", "V=s"],
874 factory.addStep(ShellCommand(
876 description = "Calculating checksums",
877 command=["make", "-j1", "checksum", "V=s"],
882 if enable_kmod_archive:
883 factory.addStep(ShellCommand(
885 description = "Creating kmod directory",
886 command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
890 factory.addStep(ShellCommand(
891 name = "kmodprepare",
892 description = "Preparing kmod archive",
893 command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
894 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
895 Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
899 factory.addStep(ShellCommand(
901 description = "Indexing kmod archive",
902 command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s",
903 Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
909 if gpg_keyid is not None:
910 factory.addStep(MasterShellCommand(
911 name = "signprepare",
912 description = "Preparing temporary signing directory",
913 command = ["mkdir", "-p", "%s/signing" %(home_dir)],
917 factory.addStep(ShellCommand(
919 description = "Packing files to sign",
920 command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
924 factory.addStep(FileUpload(
925 slavesrc = "sign.tar.gz",
926 masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
930 factory.addStep(MasterShellCommand(
932 description = "Signing files",
933 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
934 env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
938 factory.addStep(FileDownload(
939 name = "dlsigntargz",
940 mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
941 slavedest = "sign.tar.gz",
945 factory.addStep(ShellCommand(
947 description = "Unpacking signed files",
948 command = ["tar", "-xzf", "sign.tar.gz"],
953 factory.addStep(ShellCommand(
955 description = "Preparing upload directory structure",
956 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
960 factory.addStep(ShellCommand(
961 name = "linkprepare",
962 description = "Preparing repository symlink",
963 command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
964 doStepIf = IsNoMasterBuild,
968 if enable_kmod_archive:
969 factory.addStep(ShellCommand(
970 name = "kmoddirprepare",
971 description = "Preparing kmod archive upload directory",
972 command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
976 factory.addStep(ShellCommand(
978 description = "Uploading directory structure",
979 command = ["rsync", "-az"] + rsync_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
980 env={'RSYNC_PASSWORD': rsync_bin_key},
981 haltOnFailure = True,
985 # download remote sha256sums to 'target-sha256sums'
986 factory.addStep(ShellCommand(
987 name = "target-sha256sums",
988 description = "Fetching remote sha256sums for target",
989 command = ["rsync", "-z"] + rsync_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
990 env={'RSYNC_PASSWORD': rsync_bin_key},
992 haltOnFailure = False,
993 flunkOnFailure = False,
994 warnOnFailure = False,
997 # build list of files to upload
998 factory.addStep(FileDownload(
999 name = "dlsha2rsyncpl",
1000 mastersrc = scripts_dir + '/sha2rsync.pl',
1001 slavedest = "../sha2rsync.pl",
1005 factory.addStep(ShellCommand(
1007 description = "Building list of files to upload",
1008 command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
1009 haltOnFailure = True,
1012 factory.addStep(FileDownload(
1013 name = "dlrsync.sh",
1014 mastersrc = scripts_dir + '/rsync.sh',
1015 slavedest = "../rsync.sh",
1019 # upload new files and update existing ones
1020 factory.addStep(ShellCommand(
1021 name = "targetupload",
1022 description = "Uploading target files",
1023 command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1024 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1025 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1026 env={'RSYNC_PASSWORD': rsync_bin_key},
1027 haltOnFailure = True,
1031 # delete files which don't exist locally
1032 factory.addStep(ShellCommand(
1033 name = "targetprune",
1034 description = "Pruning target files",
1035 command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1036 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
1037 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1038 env={'RSYNC_PASSWORD': rsync_bin_key},
1039 haltOnFailure = True,
1043 if enable_kmod_archive:
1044 factory.addStep(ShellCommand(
1045 name = "kmodupload",
1046 description = "Uploading kmod archive",
1047 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_defopts +
1048 ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
1049 Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
1050 env={'RSYNC_PASSWORD': rsync_bin_key},
1051 haltOnFailure = True,
1055 if rsync_src_url is not None:
1056 factory.addStep(ShellCommand(
1057 name = "sourcelist",
1058 description = "Finding source archives to upload",
1059 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
1060 haltOnFailure = True
1063 factory.addStep(ShellCommand(
1064 name = "sourceupload",
1065 description = "Uploading source archives",
1066 command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_defopts +
1067 [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:slavename)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
1068 env={'RSYNC_PASSWORD': rsync_src_key},
1069 haltOnFailure = True,
1074 factory.addStep(ShellCommand(
1075 name = "packageupload",
1076 description = "Uploading package files",
1077 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
1078 env={'RSYNC_PASSWORD': rsync_bin_key},
1079 haltOnFailure = False,
1085 factory.addStep(ShellCommand(
1087 description = "Uploading logs",
1088 command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
1089 env={'RSYNC_PASSWORD': rsync_bin_key},
1090 haltOnFailure = False,
1095 factory.addStep(ShellCommand(
1097 description = "Reporting disk usage",
1098 command=["df", "-h", "."],
1099 env={'LC_ALL': 'C'},
1100 haltOnFailure = False,
1104 factory.addStep(ShellCommand(
1105 name = "ccachestat",
1106 description = "Reporting ccache stats",
1107 command=["ccache", "-s"],
1108 env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
1109 want_stderr = False,
1110 haltOnFailure = False,
1111 flunkOnFailure = False,
1112 warnOnFailure = False,
1116 from buildbot.config import BuilderConfig
1118 c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
1121 ####### STATUS TARGETS
1123 # 'status' is a list of Status Targets. The results of each build will be
1124 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
1125 # including web pages, email senders, and IRC bots.
1129 from buildbot.status import html
1130 from buildbot.status.web import authz, auth
1132 if ini.has_option("status", "bind"):
1133 if ini.has_option("status", "user") and ini.has_option("status", "password"):
1134 authz_cfg=authz.Authz(
1135 # change any of these to True to enable; see the manual for more
1137 auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
1138 gracefulShutdown = 'auth',
1139 forceBuild = 'auth', # use this to test your slave once it is set up
1140 forceAllBuilds = 'auth',
1141 pingBuilder = False,
1143 stopAllBuilds = 'auth',
1144 cancelPendingBuild = 'auth',
1146 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
1148 c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
1151 from buildbot.status import words
1153 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
1154 irc_host = ini.get("irc", "host")
1156 irc_chan = ini.get("irc", "channel")
1157 irc_nick = ini.get("irc", "nickname")
1160 if ini.has_option("irc", "port"):
1161 irc_port = ini.getint("irc", "port")
1163 if ini.has_option("irc", "password"):
1164 irc_pass = ini.get("irc", "password")
1166 irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
1167 channels = [{ "channel": irc_chan }],
1170 'successToFailure': 1,
1171 'failureToSuccess': 1
1175 c['status'].append(irc)
1180 # This specifies what database buildbot uses to store its state. You can leave
1181 # this at its default for all but the largest installations.
1182 'db_url' : "sqlite:///state.sqlite",