phase1: add JSON merge step
diff --git a/phase1/master.cfg b/phase1/master.cfg
index fb8f488a0eaee1e8975909b4698c591043432801..3ba7a1606e89b095b10555e703ea96e93295deec 100644
--- a/phase1/master.cfg
+++ b/phase1/master.cfg
 
 import os
 import re
+import base64
 import subprocess
-import ConfigParser
+import configparser
 
 from buildbot import locks
+from buildbot.changes import filter
+from buildbot.changes.gitpoller import GitPoller
+from buildbot.config import BuilderConfig
+from buildbot.plugins import reporters
+from buildbot.plugins import schedulers
+from buildbot.plugins import steps
+from buildbot.plugins import util
+from buildbot.process import properties
+from buildbot.process.factory import BuildFactory
+from buildbot.process.properties import Interpolate
+from buildbot.process.properties import Property
+from buildbot.schedulers.basic import SingleBranchScheduler
+from buildbot.schedulers.forcesched import BaseParameter
+from buildbot.schedulers.forcesched import ForceScheduler
+from buildbot.schedulers.forcesched import ValidationError
+from buildbot.steps.master import MasterShellCommand
+from buildbot.steps.shell import SetPropertyFromCommand
+from buildbot.steps.shell import ShellCommand
+from buildbot.steps.source.git import Git
+from buildbot.steps.transfer import FileDownload
+from buildbot.steps.transfer import FileUpload
+from buildbot.steps.transfer import StringDownload
+from buildbot.worker import Worker
+
 
 # This is a sample buildmaster config file. It must be installed as
 # 'master.cfg' in your buildmaster's base directory.
 
-ini = ConfigParser.ConfigParser()
-ini.read("./config.ini")
+ini = configparser.ConfigParser()
+ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
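+
+# Illustrative only: judging from the ini.get()/has_option() calls below, the
+# config.ini is expected to look roughly like this (section and key names are
+# the ones actually read; the values are made up):
+#
+#   [general]
+#   title = OpenWrt Project
+#   title_url = http://openwrt.org/
+#   workdir = /buildmaster
+#
+#   [phase1]
+#   buildbot_url = http://buildbot.example.org/
+#   port = 9989
+#
+#   [repo]
+#   url = https://git.openwrt.org/openwrt/openwrt.git
+#   branch = openwrt-19.07
+#
+#   [rsync]
+#   binary_url = uploads.example.org::bin-upload
+#   binary_password = secret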
 
 # This is the dictionary that the buildmaster pays attention to. We also use
 # a shorter alias to save typing.
 c = BuildmasterConfig = {}
 
+####### PROJECT IDENTITY
+
+# the 'title' string will appear at the top of this buildbot
+# installation's html.WebStatus home page (linked to the
+# 'titleURL') and is embedded in the title of the waterfall HTML page.
+
+c['title'] = ini.get("general", "title")
+c['titleURL'] = ini.get("general", "title_url")
+
+# the 'buildbotURL' string should point to the location where the buildbot's
+# internal web server (usually the html.WebStatus page) is visible. This
+# typically uses the port number set in the Waterfall 'status' entry, but
+# with an externally-visible host name which the buildbot cannot figure out
+# without some help.
+
+c['buildbotURL'] = ini.get("phase1", "buildbot_url")
+
 ####### BUILDSLAVES
 
-# The 'slaves' list defines the set of recognized buildslaves. Each element is
-# a BuildSlave object, specifying a unique slave name and password.  The same
-# slave name and password must be configured on the slave.
+# The 'workers' list defines the set of recognized workers. Each element is
+# a Worker object, specifying a unique worker name and password.  The same
+# worker name and password must be configured on the worker.
-from buildbot.buildslave import BuildSlave
 
 slave_port = 9989
 
-if ini.has_option("general", "port"):
-       slave_port = ini.getint("general", "port")
+if ini.has_option("phase1", "port"):
+       slave_port = ini.get("phase1", "port")
 
-c['slaves'] = []
-max_builds = dict()
+c['workers'] = []
+NetLocks = dict()
 
 for section in ini.sections():
        if section.startswith("slave "):
-               if ini.has_option(section, "name") and ini.has_option(section, "password"):
+               if ini.has_option(section, "name") and ini.has_option(section, "password") and \
+                  (not ini.has_option(section, "phase") or ini.getint(section, "phase") == 1):
+                       sl_props = { 'dl_lock':None, 'ul_lock':None, 'do_cleanup':False, 'max_builds':1, 'shared_wd':False }
                        name = ini.get(section, "name")
                        password = ini.get(section, "password")
-                       max_builds[name] = 1
+                       max_builds = 1
                        if ini.has_option(section, "builds"):
-                               max_builds[name] = ini.getint(section, "builds")
-                       c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name]))
-
-# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
+                               max_builds = ini.getint(section, "builds")
+                               sl_props['max_builds'] = max_builds
+                               if max_builds == 1:
+                                       sl_props['shared_wd'] = True
+                       if ini.has_option(section, "cleanup"):
+                               sl_props['do_cleanup'] = ini.getboolean(section, "cleanup")
+                       if ini.has_option(section, "dl_lock"):
+                               lockname = ini.get(section, "dl_lock")
+                               sl_props['dl_lock'] = lockname
+                               if lockname not in NetLocks:
+                                       NetLocks[lockname] = locks.MasterLock(lockname)
+                       if ini.has_option(section, "ul_lock"):
+                               lockname = ini.get(section, "ul_lock")
+                               sl_props['ul_lock'] = lockname
+                               if lockname not in NetLocks:
+                                       NetLocks[lockname] = locks.MasterLock(lockname)
+                       if ini.has_option(section, "shared_wd"):
+                               shared_wd = ini.getboolean(section, "shared_wd")
+                               sl_props['shared_wd'] = shared_wd
+                               if shared_wd and (max_builds != 1):
+                                       raise ValueError('max_builds must be 1 with shared workdir!')
+                       c['workers'].append(Worker(name, password, max_builds = max_builds, properties = sl_props))
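+
+# A hypothetical worker section consumed by the loop above (all keys optional
+# except name/password; dl_lock/ul_lock name shared master-side locks):
+#
+#   [slave builder01]
+#   name = builder01
+#   password = secret
+#   builds = 3
+#   cleanup = true
+#   dl_lock = mirror-dl
+#   ul_lock = mirror-ul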
+
+# 'protocols' defines the TCP port to listen on for connections from workers.
 # This must match the value configured into the buildslaves (with their
 # --master option)
-c['slavePortnum'] = slave_port
+c['protocols'] = {'pb': {'port': slave_port}}
 
 # coalesce builds
-c['mergeRequests'] = True
+c['collapseRequests'] = True
 
 # Reduce amount of backlog data
 c['buildHorizon'] = 30
@@ -57,7 +120,8 @@ c['logHorizon'] = 20
 
 ####### CHANGESOURCES
 
-home_dir = os.path.abspath(ini.get("general", "homedir"))
+work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
+scripts_dir = os.path.abspath("../scripts")
 tree_expire = 0
 other_builds = 0
 cc_version = None
@@ -65,17 +129,33 @@ cc_version = None
 cc_command = "gcc"
 cxx_command = "g++"
 
-if ini.has_option("general", "expire"):
-       tree_expire = ini.getint("general", "expire")
+config_seed = ""
 
-if ini.has_option("general", "other_builds"):
-       other_builds = ini.getint("general", "other_builds")
+git_ssh = False
+git_ssh_key = None
 
-if ini.has_option("general", "cc_version"):
-       cc_version = ini.get("general", "cc_version").split()
+if ini.has_option("phase1", "expire"):
+       tree_expire = ini.getint("phase1", "expire")
+
+if ini.has_option("phase1", "other_builds"):
+       other_builds = ini.getint("phase1", "other_builds")
+
+if ini.has_option("phase1", "cc_version"):
+       cc_version = ini.get("phase1", "cc_version").split()
        if len(cc_version) == 1:
                cc_version = ["eq", cc_version[0]]
 
+if ini.has_option("general", "git_ssh"):
+       git_ssh = ini.getboolean("general", "git_ssh")
+
+if ini.has_option("general", "git_ssh_key"):
+       git_ssh_key = ini.get("general", "git_ssh_key")
+else:
+       git_ssh = False
+
+if ini.has_option("phase1", "config_seed"):
+       config_seed = ini.get("phase1", "config_seed")
+
 repo_url = ini.get("repo", "url")
 repo_branch = "master"
 
@@ -84,59 +164,60 @@ if ini.has_option("repo", "branch"):
 
 rsync_bin_url = ini.get("rsync", "binary_url")
 rsync_bin_key = ini.get("rsync", "binary_password")
+rsync_bin_defopts = ["-v", "-4", "--timeout=120"]
+
+if rsync_bin_url.find("::") > 0 or rsync_bin_url.find("rsync://") == 0:
+       rsync_bin_defopts += ["--contimeout=20"]
 
 rsync_src_url = None
 rsync_src_key = None
+rsync_src_defopts = ["-v", "-4", "--timeout=120"]
 
 if ini.has_option("rsync", "source_url"):
        rsync_src_url = ini.get("rsync", "source_url")
        rsync_src_key = ini.get("rsync", "source_password")
 
-gpg_home = "~/.gnupg"
-gpg_keyid = None
-gpg_comment = "Unattended build signature"
-gpg_passfile = "/dev/null"
+       if rsync_src_url.find("::") > 0 or rsync_src_url.find("rsync://") == 0:
+               rsync_src_defopts += ["--contimeout=20"]
 
-if ini.has_option("gpg", "home"):
-       gpg_home = ini.get("gpg", "home")
+usign_key = None
+usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
 
-if ini.has_option("gpg", "keyid"):
-       gpg_keyid = ini.get("gpg", "keyid")
+if ini.has_option("usign", "key"):
+       usign_key = ini.get("usign", "key")
 
-if ini.has_option("gpg", "comment"):
-       gpg_comment = ini.get("gpg", "comment")
+if ini.has_option("usign", "comment"):
+       usign_comment = ini.get("usign", "comment")
 
-if ini.has_option("gpg", "passfile"):
-       gpg_passfile = ini.get("gpg", "passfile")
+enable_kmod_archive = True
 
 
 # find targets
 targets = [ ]
 
-if not os.path.isdir(home_dir+'/source.git'):
-       subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, home_dir+'/source.git'])
+if not os.path.isdir(work_dir+'/source.git'):
+       subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
 else:
-       subprocess.call(["git", "pull"], cwd = home_dir+'/source.git')
+       subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
 
-findtargets = subprocess.Popen([home_dir+'/dumpinfo.pl', 'targets'],
-       stdout = subprocess.PIPE, cwd = home_dir+'/source.git')
+findtargets = subprocess.Popen([scripts_dir + '/dumpinfo.pl', 'targets'],
+       stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
 
 while True:
        line = findtargets.stdout.readline()
        if not line:
                break
-       ta = line.strip().split(' ')
+       ta = line.decode().strip().split(' ')
        targets.append(ta[0])
 
 
 # the 'change_source' setting tells the buildmaster how it should find out
 # about source code changes.  Here we point to the buildbot clone of pyflakes.
 
-from buildbot.changes.gitpoller import GitPoller
 c['change_source'] = []
 c['change_source'].append(GitPoller(
        repo_url,
-       workdir=home_dir+'/work.git', branch=repo_branch,
+       workdir=work_dir+'/work.git', branch=repo_branch,
        pollinterval=300))
 
 ####### SCHEDULERS
@@ -144,36 +225,108 @@ c['change_source'].append(GitPoller(
 # Configure the Schedulers, which decide how to react to incoming changes.  In this
 # case, just kick off a 'basebuild' build
 
-from buildbot.schedulers.basic import SingleBranchScheduler
-from buildbot.schedulers.forcesched import ForceScheduler
-from buildbot.changes import filter
+class TagChoiceParameter(BaseParameter):
+       spec_attributes = ["strict", "choices"]
+       type = "list"
+       strict = True
+
+       def __init__(self, name, label=None, **kw):
+               super().__init__(name, label, **kw)
+               self._choice_list = []
+
+       @property
+       def choices(self):
+               taglist = []
+               basever = re.search(r'-([0-9]+\.[0-9]+)$', repo_branch)
+
+               if basever:
+                       findtags = subprocess.Popen(
+                               ['git', 'ls-remote', '--tags', repo_url],
+                               stdout = subprocess.PIPE)
+
+                       while True:
+                               line = findtags.stdout.readline()
+
+                               if not line:
+                                       break
+
+                               tagver = re.search(r'\brefs/tags/v([0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?)$', line.decode().strip())
+
+                               if tagver and tagver[1].find(basever[1]) == 0:
+                                       taglist.append(tagver[1])
+
+               taglist.sort(reverse=True, key=lambda tag: tag if re.search(r'-rc[0-9]+$', tag) else tag + '-z')
+               taglist.insert(0, '')
+
+               self._choice_list = taglist
+
+               return self._choice_list
+
+       def parse_from_arg(self, s):
+               if self.strict and s not in self._choice_list:
+                       raise ValidationError("'%s' does not belong to list of available choices '%s'" % (s, self._choice_list))
+               return s
+
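+# Worked example for TagChoiceParameter.choices above (hypothetical values): on
+# repo_branch "openwrt-19.07" the basever regex captures "19.07"; an ls-remote
+# line ending in "refs/tags/v19.07.3" matches the tag regex and "19.07.3" is
+# offered, while v18.06.* tags and "^{}" dereference lines are skipped. The sort
+# key appends "-z" to non-rc tags so "19.07.3" is listed before "19.07.3-rc2".
+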
 c['schedulers'] = []
 c['schedulers'].append(SingleBranchScheduler(
-       name="all",
-       change_filter=filter.ChangeFilter(branch=repo_branch),
-       treeStableTimer=60,
-       builderNames=targets))
+       name            = "all",
+       change_filter   = filter.ChangeFilter(branch=repo_branch),
+       treeStableTimer = 60,
+       builderNames    = targets))
 
 c['schedulers'].append(ForceScheduler(
-       name="force",
-       builderNames=targets))
+       name         = "force",
+       buttonName   = "Force builds",
+       label        = "Force build details",
+       builderNames = [ "00_force_build" ],
+
+       codebases = [
+               util.CodebaseParameter(
+                       "",
+                       label      = "Repository",
+                       branch     = util.FixedParameter(name = "branch",     default = ""),
+                       revision   = util.FixedParameter(name = "revision",   default = ""),
+                       repository = util.FixedParameter(name = "repository", default = ""),
+                       project    = util.FixedParameter(name = "project",    default = "")
+               )
+       ],
+
+       reason = util.StringParameter(
+               name     = "reason",
+               label    = "Reason",
+               default  = "Trigger build",
+               required = True,
+               size     = 80
+       ),
+
+       properties = [
+               util.NestedParameter(
+                       name="options",
+                       label="Build Options",
+                       layout="vertical",
+                       fields=[
+                               util.ChoiceStringParameter(
+                                       name    = "target",
+                                       label   = "Build target",
+                                       default = "all",
+                                       choices = [ "all" ] + targets
+                               ),
+                               TagChoiceParameter(
+                                       name    = "tag",
+                                       label   = "Build tag",
+                                       default = ""
+                               )
+                       ]
+               )
+       ]
+))
 
 ####### BUILDERS
 
 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
-# what steps, and which slaves can execute them.  Note that any particular build will
+# what steps, and which workers can execute them.  Note that any particular build will
 # only take place on one slave.
 
-from buildbot.process.factory import BuildFactory
-from buildbot.steps.source.git import Git
-from buildbot.steps.shell import ShellCommand
-from buildbot.steps.shell import SetProperty
-from buildbot.steps.transfer import FileUpload
-from buildbot.steps.transfer import FileDownload
-from buildbot.steps.master import MasterShellCommand
-from buildbot.process.properties import WithProperties
-
-
 CleanTargetMap = [
        [ "tools",      "tools/clean"                   ],
        [ "chain",      "toolchain/clean"               ],
@@ -182,7 +335,7 @@ CleanTargetMap = [
        [ "dist",       "distclean"                             ]
 ]
 
-def IsCleanRequested(pattern):
+def IsMakeCleanRequested(pattern):
        def CheckCleanProperty(step):
                val = step.getProperty("clean")
                if val and re.match(pattern, val):
@@ -192,9 +345,37 @@ def IsCleanRequested(pattern):
 
        return CheckCleanProperty
 
+def IsSharedWorkdir(step):
+       return bool(step.getProperty("shared_wd"))
+
+def IsCleanupRequested(step):
+       if IsSharedWorkdir(step):
+               return False
+       do_cleanup = step.getProperty("do_cleanup")
+       if do_cleanup:
+               return True
+       else:
+               return False
+
+def IsExpireRequested(step):
+       if IsSharedWorkdir(step):
+               return False
+       else:
+               return not IsCleanupRequested(step)
+
+def IsGitFreshRequested(step):
+       do_cleanup = step.getProperty("do_cleanup")
+       if do_cleanup:
+               return True
+       else:
+               return False
+
+def IsGitCleanRequested(step):
+       return not IsGitFreshRequested(step)
+
 def IsTaggingRequested(step):
        val = step.getProperty("tag")
-       if val and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
+       if val and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", val):
                return True
        else:
                return False
@@ -205,39 +386,44 @@ def IsNoTaggingRequested(step):
 def IsNoMasterBuild(step):
        return repo_branch != "master"
 
-def GetBaseVersion(props):
-       if re.match("^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
+def GetBaseVersion():
+       if re.match(r"^[^-]+-[0-9]+\.[0-9]+$", repo_branch):
                return repo_branch.split('-')[1]
        else:
                return "master"
 
+@properties.renderer
 def GetVersionPrefix(props):
-       basever = GetBaseVersion(props)
-       if props.hasProperty("tag") and re.match("^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
+       basever = GetBaseVersion()
+       if props.hasProperty("tag") and re.match(r"^[0-9]+\.[0-9]+\.[0-9]+(?:-rc[0-9]+)?$", props["tag"]):
                return "%s/" % props["tag"]
        elif basever != "master":
                return "%s-SNAPSHOT/" % basever
        else:
                return ""
 
+@properties.renderer
 def GetNumJobs(props):
-       if props.hasProperty("slavename") and props.hasProperty("nproc"):
-               return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
+       if props.hasProperty("max_builds") and props.hasProperty("nproc"):
+               return str(int(int(props["nproc"]) / (props["max_builds"] + other_builds)))
        else:
-               return 1
+               return "1"
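+
+# e.g. with the hypothetical values nproc=8, max_builds=2 and other_builds=0 the
+# renderer yields str(int(8 / 2)) == "4"; a string is returned because it is
+# substituted into "-j%(kw:jobs)s" via Interpolate() below.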
 
+@properties.renderer
 def GetCC(props):
        if props.hasProperty("cc_command"):
                return props["cc_command"]
        else:
                return "gcc"
 
+@properties.renderer
 def GetCXX(props):
        if props.hasProperty("cxx_command"):
                return props["cxx_command"]
        else:
                return "g++"
 
+@properties.renderer
 def GetCwd(props):
        if props.hasProperty("builddir"):
                return props["builddir"]
@@ -246,27 +432,92 @@ def GetCwd(props):
        else:
                return "/"
 
+@properties.renderer
+def GetCCache(props):
+       if props.hasProperty("ccache_command") and "ccache" in props["ccache_command"]:
+               return props["ccache_command"]
+       else:
+               return ""
+
 def GetNextBuild(builder, requests):
        for r in requests:
                if r.properties and r.properties.hasProperty("tag"):
                        return r
        return requests[0]
 
-def MakeEnv(overrides=None):
+def MakeEnv(overrides=None, tryccache=False):
        env = {
-               'CC': WithProperties("%(cc)s", cc=GetCC),
-               'CXX': WithProperties("%(cxx)s", cxx=GetCXX),
-               'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)
+               'CCC': Interpolate("%(kw:cc)s", cc=GetCC),
+               'CCXX': Interpolate("%(kw:cxx)s", cxx=GetCXX),
        }
+       if tryccache:
+               env['CC'] = Interpolate("%(kw:cwd)s/ccache_cc.sh", cwd=GetCwd)
+               env['CXX'] = Interpolate("%(kw:cwd)s/ccache_cxx.sh", cwd=GetCwd)
+               env['CCACHE'] = Interpolate("%(kw:ccache)s", ccache=GetCCache)
+       else:
+               env['CC'] = env['CCC']
+               env['CXX'] = env['CCXX']
+               env['CCACHE'] = ''
        if overrides is not None:
                env.update(overrides)
        return env
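+
+# For illustration, MakeEnv(tryccache=True) on a worker where "which ccache"
+# succeeded resolves roughly to CC=<builddir>/ccache_cc.sh,
+# CXX=<builddir>/ccache_cxx.sh and CCACHE=/usr/bin/ccache, with CCC/CCXX holding
+# the real compiler commands for the wrapper scripts installed further down.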
 
+@properties.renderer
+def NetLockDl(props):
+       lock = None
+       if props.hasProperty("dl_lock"):
+               lock = NetLocks[props["dl_lock"]]
+       if lock is not None:
+               return [lock.access('exclusive')]
+       else:
+               return []
+
+@properties.renderer
+def NetLockUl(props):
+       lock = None
+       if props.hasProperty("ul_lock"):
+               lock = NetLocks[props["ul_lock"]]
+       if lock is not None:
+               return [lock.access('exclusive')]
+       else:
+               return []
+
+@util.renderer
+def TagPropertyValue(props):
+       if props.hasProperty("options"):
+               options = props.getProperty("options")
+               if type(options) is dict:
+                       return options.get("tag")
+       return None
+
+def IsTargetSelected(target):
+       def CheckTargetProperty(step):
+               try:
+                       options = step.getProperty("options")
+                       if type(options) is dict:
+                               selected_target = options.get("target", "all")
+                               if selected_target != "all" and selected_target != target:
+                                       return False
+               except KeyError:
+                       pass
+
+               return True
+
+       return CheckTargetProperty
+
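+# Note on the slicing below (assuming the usual signify/usign secret key layout
+# after base64 decoding): bytes 0..1 are the algorithm id, 32..39 the key
+# number and 72..103 the public half of the key pair, which together form the
+# binary part of a usign public key.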
+def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
+       try:
+               seckey = base64.b64decode(seckey)
+       except Exception:
+               return None
+
+       return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
+               base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
+
 
 c['builders'] = []
 
-dlLock = locks.SlaveLock("slave_dl")
-tagLock = locks.MasterLock("make_tag")
+dlLock = locks.WorkerLock("slave_dl")
 
 checkBuiltin = re.sub('[\t\n ]+', ' ', """
        checkBuiltin() {
@@ -323,48 +574,84 @@ class IfBuiltinShellCommand(ShellCommand):
 
 slaveNames = [ ]
 
-for slave in c['slaves']:
-       slaveNames.append(slave.slavename)
+for slave in c['workers']:
+       slaveNames.append(slave.workername)
+
+force_factory = BuildFactory()
+
+c['builders'].append(BuilderConfig(
+       name        = "00_force_build",
+       workernames = slaveNames,
+       factory     = force_factory))
 
 for target in targets:
        ts = target.split('/')
 
        factory = BuildFactory()
 
+       # setup shared work directory if required
+       factory.addStep(ShellCommand(
+               name = "sharedwd",
+               description = "Setting up shared work directory",
+               command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
+               workdir = ".",
+               haltOnFailure = True,
+               doStepIf = IsSharedWorkdir))
+
        # find number of cores
-       factory.addStep(SetProperty(
+       factory.addStep(SetPropertyFromCommand(
                name = "nproc",
                property = "nproc",
                description = "Finding number of CPUs",
                command = ["nproc"]))
 
        # find gcc and g++ compilers
-       if cc_version is not None:
-               factory.addStep(FileDownload(
-                       mastersrc = "findbin.pl",
-                       slavedest = "../findbin.pl",
-                       mode = 0755))
-
-               factory.addStep(SetProperty(
-                       name = "gcc",
-                       property = "cc_command",
-                       description = "Finding gcc command",
-                       command = ["../findbin.pl", "gcc", cc_version[0], cc_version[1]],
-                       haltOnFailure = True))
-
-               factory.addStep(SetProperty(
-                       name = "g++",
-                       property = "cxx_command",
-                       description = "Finding g++ command",
-                       command = ["../findbin.pl", "g++", cc_version[0], cc_version[1]],
-                       haltOnFailure = True))
+       factory.addStep(FileDownload(
+               name = "dlfindbinpl",
+               mastersrc = scripts_dir + '/findbin.pl',
+               workerdest = "../findbin.pl",
+               mode = 0o755))
+
+       factory.addStep(SetPropertyFromCommand(
+               name = "gcc",
+               property = "cc_command",
+               description = "Finding gcc command",
+               command = [
+                       "../findbin.pl", "gcc",
+                       cc_version[0] if cc_version is not None else '',
+                       cc_version[1] if cc_version is not None else ''
+               ],
+               haltOnFailure = True))
+
+       factory.addStep(SetPropertyFromCommand(
+               name = "g++",
+               property = "cxx_command",
+               description = "Finding g++ command",
+               command = [
+                       "../findbin.pl", "g++",
+                       cc_version[0] if cc_version is not None else '',
+                       cc_version[1] if cc_version is not None else ''
+               ],
+               haltOnFailure = True))
+
+       # see if ccache is available
+       factory.addStep(SetPropertyFromCommand(
+               property = "ccache_command",
+               command = ["which", "ccache"],
+               description = "Testing for ccache command",
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = False,
+       ))
 
        # expire tree if needed
        if tree_expire > 0:
                factory.addStep(FileDownload(
-                       mastersrc = "expire.sh",
-                       slavedest = "../expire.sh",
-                       mode = 0755))
+                       name = "dlexpiresh",
+                       doStepIf = IsExpireRequested,
+                       mastersrc = scripts_dir + '/expire.sh',
+                       workerdest = "../expire.sh",
+                       mode = 0o755))
 
                factory.addStep(ShellCommand(
                        name = "expire",
@@ -372,8 +659,35 @@ for target in targets:
                        command = ["./expire.sh", str(tree_expire)],
                        workdir = ".",
                        haltOnFailure = True,
+                       doStepIf = IsExpireRequested,
                        timeout = 2400))
 
+       # cleanup.sh if needed
+       factory.addStep(FileDownload(
+               name = "dlcleanupsh",
+               mastersrc = scripts_dir + '/cleanup.sh',
+               workerdest = "../cleanup.sh",
+               mode = 0o755,
+               doStepIf = IsCleanupRequested))
+
+       factory.addStep(ShellCommand(
+               name = "cleanold",
+               description = "Cleaning previous builds",
+               command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
+               workdir = ".",
+               haltOnFailure = True,
+               doStepIf = IsCleanupRequested,
+               timeout = 2400))
+
+       factory.addStep(ShellCommand(
+               name = "cleanup",
+               description = "Cleaning work area",
+               command = ["./cleanup.sh", c['buildbotURL'], Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
+               workdir = ".",
+               haltOnFailure = True,
+               doStepIf = IsCleanupRequested,
+               timeout = 2400))
+
        # user-requested clean targets
        for tuple in CleanTargetMap:
                factory.addStep(ShellCommand(
@@ -381,36 +695,43 @@ for target in targets:
                        description = 'User-requested "make %s"' % tuple[1],
                        command = ["make", tuple[1], "V=s"],
                        env = MakeEnv(),
-                       doStepIf = IsCleanRequested(tuple[0])
+                       doStepIf = IsMakeCleanRequested(tuple[0])
                ))
 
-       factory.addStep(MasterShellCommand(
-               name = "maketag",
-               description = "Tagging Git repository",
-               command = [home_dir+'/maketag.sh', '-i', '-k', str(gpg_keyid or ''),
-                          '-p', str(gpg_passfile or ''), '-v', WithProperties("%(tag:-)s")],
-               path = home_dir+'/source.git',
-               env = {'GNUPGHOME': gpg_home},
-               haltOnFailure = True,
-               doStepIf = IsTaggingRequested,
-               locks = [tagLock.access('exclusive')]
-       ))
-
-       # switch to branch
+       # Workaround bug when switching from a checked out tag back to a branch
+       # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
        factory.addStep(ShellCommand(
-               name = "switchbranch",
-               description = "Checking out Git branch",
-               command = "if [ -d .git ]; then git fetch && git checkout '%s'; else exit 0; fi" % repo_branch,
+               name = "gitcheckout",
+               description = "Ensure that Git HEAD is sane",
+               command = "if [ -d .git ]; then git checkout -f %s; git branch --set-upstream-to origin/%s; else exit 0; fi" %(repo_branch, repo_branch),
+               haltOnFailure = True))
+
+       # check out the source
+       # Git() runs:
+       #   if the repo doesn't exist: 'git clone repourl'
+       #   method 'clean' runs 'git clean -d -f', method 'fresh' runs 'git clean -d -f -x'; both only work with mode='full'
+       #   then 'git fetch -t repourl branch; git reset --hard revision'
+       # Git() parameters can't take a renderer until buildbot 0.8.10, so we have to split the fresh and clean cases
+       # if buildbot is updated, one can use: method = Interpolate('%(prop:do_cleanup:#?|fresh|clean)s')
+       factory.addStep(Git(
+               name = "gitclean",
+               repourl = repo_url,
+               branch = repo_branch,
+               mode = 'full',
+               method = 'clean',
                haltOnFailure = True,
-               doStepIf = IsNoTaggingRequested
+               doStepIf = IsGitCleanRequested,
        ))
 
-       # check out the source
        factory.addStep(Git(
+               name = "gitfresh",
                repourl = repo_url,
                branch = repo_branch,
-               mode = 'incremental',
-               method = 'clean'))
+               mode = 'full',
+               method = 'fresh',
+               haltOnFailure = True,
+               doStepIf = IsGitFreshRequested,
+       ))
 
        # update remote refs
        factory.addStep(ShellCommand(
@@ -420,24 +741,23 @@ for target in targets:
                haltOnFailure = True
        ))
 
-       # fetch tags
-       factory.addStep(ShellCommand(
-               name = "fetchtag",
-               description = "Fetching Git tags",
-               command = ["git", "fetch", "--tags", "--", repo_url],
-               haltOnFailure = True,
-               doStepIf = IsTaggingRequested
-       ))
-
        # switch to tag
        factory.addStep(ShellCommand(
                name = "switchtag",
                description = "Checking out Git tag",
-               command = ["git", "checkout", WithProperties("tags/v%(tag:-)s")],
+               command = ["git", "checkout", Interpolate("tags/v%(prop:tag:-)s")],
                haltOnFailure = True,
                doStepIf = IsTaggingRequested
        ))
 
+       # Verify that Git HEAD points to a tag or branch
+       # Ref: http://lists.infradead.org/pipermail/openwrt-devel/2019-June/017809.html
+       factory.addStep(ShellCommand(
+               name = "gitverify",
+               description = "Ensure that Git HEAD is pointing to a branch or tag",
+               command = 'git rev-parse --abbrev-ref HEAD | grep -vxqF HEAD || git show-ref --tags --dereference 2>/dev/null | sed -ne "/^$(git rev-parse HEAD) / { s|^.*/||; s|\\^.*||; p }" | grep -qE "^v[0-9][0-9]\\."',
+               haltOnFailure = True))
+
        factory.addStep(ShellCommand(
                name = "rmtmp",
                description = "Remove tmp folder",
@@ -455,32 +775,77 @@ for target in targets:
                description = "Remove feed symlinks",
                command=["rm", "-rf", "package/feeds/"]))
 
+       factory.addStep(StringDownload(
+               name = "ccachecc",
+               s = '#!/bin/sh\nexec ${CCACHE} ${CCC} "$@"\n',
+               workerdest = "../ccache_cc.sh",
+               mode = 0o755,
+       ))
+
+       factory.addStep(StringDownload(
+               name = "ccachecxx",
+               s = '#!/bin/sh\nexec ${CCACHE} ${CCXX} "$@"\n',
+               workerdest = "../ccache_cxx.sh",
+               mode = 0o755,
+       ))
+
+       # Git SSH
+       if git_ssh:
+               factory.addStep(StringDownload(
+                       name = "dlgitclonekey",
+                       s = git_ssh_key,
+                       workerdest = "../git-clone.key",
+                       mode = 0o600,
+               ))
+
+               factory.addStep(ShellCommand(
+                       name = "patchfeedsconf",
+                       description = "Patching feeds.conf",
+                       command="sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
+                       haltOnFailure = True
+               ))
+
        # feed
        factory.addStep(ShellCommand(
                name = "updatefeeds",
                description = "Updating feeds",
                command=["./scripts/feeds", "update"],
-               env = MakeEnv()))
+               env = MakeEnv(tryccache=True, overrides={'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {}),
+               haltOnFailure = True
+       ))
+
+       # Git SSH
+       if git_ssh:
+               factory.addStep(ShellCommand(
+                       name = "rmfeedsconf",
+                       description = "Removing feeds.conf",
+                       command=["rm", "feeds.conf"],
+                       haltOnFailure = True
+               ))
 
        # feed
        factory.addStep(ShellCommand(
                name = "installfeeds",
                description = "Installing feeds",
                command=["./scripts/feeds", "install", "-a"],
-               env = MakeEnv()))
+               env = MakeEnv(tryccache=True),
+               haltOnFailure = True
+       ))
 
        # seed config
-       factory.addStep(FileDownload(
-               mastersrc = "config.seed",
-               slavedest = ".config",
-               mode = 0644
-       ))
+       if config_seed is not None:
+               factory.addStep(StringDownload(
+                       name = "dlconfigseed",
+                       s = config_seed + '\n',
+                       workerdest = ".config",
+                       mode = 0o644
+               ))
 
        # configure
        factory.addStep(ShellCommand(
                name = "newconfig",
                description = "Seeding .config",
-               command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\n' >> .config" %(ts[0], ts[0], ts[1])
+               command = "printf 'CONFIG_TARGET_%s=y\\nCONFIG_TARGET_%s_%s=y\\nCONFIG_SIGNED_PACKAGES=%s\\n' >> .config" %(ts[0], ts[0], ts[1], 'y' if usign_key is not None else 'n')
        ))
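+
+       # For a hypothetical target "ar71xx/generic" the printf above appends
+       # CONFIG_TARGET_ar71xx=y, CONFIG_TARGET_ar71xx_generic=y and
+       # CONFIG_SIGNED_PACKAGES=y (or =n when no usign key is configured).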
 
        factory.addStep(ShellCommand(
@@ -508,29 +873,34 @@ for target in targets:
        ))
 
        # find libc suffix
-       factory.addStep(SetProperty(
+       factory.addStep(SetPropertyFromCommand(
                name = "libc",
                property = "libc",
                description = "Finding libc suffix",
                command = ["sed", "-ne", '/^CONFIG_LIBC=/ { s!^CONFIG_LIBC="\\(.*\\)"!\\1!; s!^musl$!!; s!.\\+!-&!p }', ".config"]))
 
-       # ccache helper
-       factory.addStep(FileDownload(
-               mastersrc = "ccache.sh",
-               slavedest = "ccache.sh",
-               mode = 0755
-       ))
+       # install build key
+       if usign_key is not None:
+               factory.addStep(StringDownload(
+                       name = "dlkeybuildpub",
+                       s = UsignSec2Pub(usign_key, usign_comment),
+                       workerdest = "key-build.pub",
+                       mode = 0o600,
+               ))
 
-       # ccache prepare
-       factory.addStep(ShellCommand(
-                name = "prepccache",
-                description = "Preparing ccache",
-                command = ["./ccache.sh"]
-       ))
+               factory.addStep(StringDownload(
+                       name = "dlkeybuild",
+                       s = "# fake private key",
+                       workerdest = "key-build",
+                       mode = 0o600,
+               ))
 
-       # install build key
-       factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="key-build", mode=0600))
-       factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="key-build.pub", mode=0600))
+               factory.addStep(StringDownload(
+                       name = "dlkeybuilducert",
+                       s = "# fake certificate",
+                       workerdest = "key-build.ucert",
+                       mode = 0o600,
+               ))
 
        # prepare dl
        factory.addStep(ShellCommand(
@@ -544,9 +914,9 @@ for target in targets:
        # prepare tar
        factory.addStep(ShellCommand(
                name = "dltar",
-               description = "Building GNU tar",
-               command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
-               env = MakeEnv(),
+               description = "Building and installing GNU tar",
+               command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/tar/compile", "V=s"],
+               env = MakeEnv(tryccache=True),
                haltOnFailure = True
        ))
 
@@ -554,10 +924,10 @@ for target in targets:
        factory.addStep(ShellCommand(
                name = "dlrun",
                description = "Populating dl/",
-               command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "download", "V=s"],
+               command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "download", "V=s"],
                env = MakeEnv(),
                logEnviron = False,
-               locks = [dlLock.access('exclusive')]
+               locks = [dlLock.access('exclusive')],
        ))
 
        factory.addStep(ShellCommand(
@@ -569,16 +939,16 @@ for target in targets:
        # build
        factory.addStep(ShellCommand(
                name = "tools",
-               description = "Building tools",
-               command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "tools/install", "V=s"],
-               env = MakeEnv(),
+               description = "Building and installing tools",
+               command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "tools/install", "V=s"],
+               env = MakeEnv(tryccache=True),
                haltOnFailure = True
        ))
 
        factory.addStep(ShellCommand(
                name = "toolchain",
-               description = "Building toolchain",
-               command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "toolchain/install", "V=s"],
+               description = "Building and installing toolchain",
+               command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "toolchain/install", "V=s"],
                env = MakeEnv(),
                haltOnFailure = True
        ))
@@ -586,12 +956,21 @@ for target in targets:
        factory.addStep(ShellCommand(
                name = "kmods",
                description = "Building kmods",
-               command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
+               command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
                env = MakeEnv(),
                #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
                haltOnFailure = True
        ))
 
+       # find kernel version
+       factory.addStep(SetPropertyFromCommand(
+               name = "kernelversion",
+               property = "kernelversion",
+               description = "Finding the effective Kernel version",
+               command = "make --no-print-directory -C target/linux/ val.LINUX_VERSION val.LINUX_RELEASE val.LINUX_VERMAGIC | xargs printf '%s-%s-%s\\n'",
+               env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
+       ))
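+       # e.g. (made-up values) LINUX_VERSION=4.14.180, LINUX_RELEASE=1 plus the
+       # vermagic hash give kernelversion "4.14.180-1-<vermagic>", the directory
+       # name used by the kmod archive steps below.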
+
        factory.addStep(ShellCommand(
                name = "pkgclean",
                description = "Cleaning up package build",
@@ -601,7 +980,7 @@ for target in targets:
        factory.addStep(ShellCommand(
                name = "pkgbuild",
                description = "Building packages",
-               command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
+               command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/compile", "V=s", "IGNORE_ERRORS=n m", "BUILD_LOG=1"],
                env = MakeEnv(),
                #env={'BUILD_LOG_DIR': 'bin/%s' %(ts[0])},
                haltOnFailure = True
@@ -611,7 +990,7 @@ for target in targets:
        factory.addStep(ShellCommand(
                name = "pkginstall",
                description = "Installing packages",
-               command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "package/install", "V=s"],
+               command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/install", "V=s"],
                env = MakeEnv(),
                haltOnFailure = True
        ))
@@ -619,24 +998,59 @@ for target in targets:
        factory.addStep(ShellCommand(
                name = "pkgindex",
                description = "Indexing packages",
-               command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "package/index", "V=s"],
+               command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES="],
                env = MakeEnv(),
                haltOnFailure = True
        ))
 
+       if enable_kmod_archive:
+               # embed kmod repository. Must happen before 'images'
+
+               # find rootfs staging directory
+               factory.addStep(SetPropertyFromCommand(
+                       name = "stageroot",
+                       property = "stageroot",
+                       description = "Finding the rootfs staging directory",
+                       command=["make", "--no-print-directory", "val.STAGING_DIR_ROOT"],
+                       env = { 'TOPDIR': Interpolate("%(kw:cwd)s/build", cwd=GetCwd) }
+               ))
+
+               factory.addStep(ShellCommand(
+                       name = "filesdir",
+                       description = "Creating file overlay directory",
+                       command=["mkdir", "-p", "files/etc/opkg"],
+                       haltOnFailure = True
+               ))
+
+               factory.addStep(ShellCommand(
+                       name = "kmodconfig",
+                       description = "Embedding kmod repository configuration",
+                       command=Interpolate("sed -e 's#^\\(src/gz .*\\)_core \\(.*\\)/packages$#&\\n\\1_kmods \\2/kmods/%(prop:kernelversion)s#' " +
+                                              "%(prop:stageroot)s/etc/opkg/distfeeds.conf > files/etc/opkg/distfeeds.conf"),
+                       haltOnFailure = True
+               ))
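+
+               # Illustration with a made-up feed line: the sed above copies
+               #   src/gz openwrt_core http://downloads.example.org/snapshots/targets/ar71xx/generic/packages
+               # from the staged distfeeds.conf and appends
+               #   src/gz openwrt_kmods http://downloads.example.org/snapshots/targets/ar71xx/generic/kmods/<kernelversion>
+               # so that built images ship an opkg feed pointing at the kmod archive.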
+
        #factory.addStep(IfBuiltinShellCommand(
        factory.addStep(ShellCommand(
                name = "images",
-               description = "Building images",
-               command=["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "target/install", "V=s"],
+               description = "Building and installing images",
+               command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "target/install", "V=s"],
+               env = MakeEnv(),
+               haltOnFailure = True
+       ))
+
+       factory.addStep(ShellCommand(
+               name = "buildinfo",
+               description = "Generating config.buildinfo, version.buildinfo and feeds.buildinfo",
+               command = "make -j1 buildinfo V=s || true",
                env = MakeEnv(),
                haltOnFailure = True
        ))
 
        factory.addStep(ShellCommand(
-               name = "diffconfig",
-               description = "Generating config.seed",
-               command=["make", "-j1", "diffconfig", "V=s"],
+               name = "json_overview_image_info",
+               description = "Generate profiles.json in target folder",
+               command = "make -j1 json_overview_image_info V=s || true",
                env = MakeEnv(),
                haltOnFailure = True
        ))
@@ -649,39 +1063,66 @@ for target in targets:
                haltOnFailure = True
        ))
 
+       if enable_kmod_archive:
+               factory.addStep(ShellCommand(
+                       name = "kmoddir",
+                       description = "Creating kmod directory",
+                       command=["mkdir", "-p", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1])],
+                       haltOnFailure = True
+               ))
+
+               factory.addStep(ShellCommand(
+                       name = "kmodprepare",
+                       description = "Preparing kmod archive",
+                       command=["rsync", "--include=/kmod-*.ipk", "--exclude=*", "-va",
+                                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/packages/", target=ts[0], subtarget=ts[1]),
+                                Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
+                       haltOnFailure = True
+               ))
+
+               factory.addStep(ShellCommand(
+                       name = "kmodindex",
+                       description = "Indexing kmod archive",
+                       command=["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "package/index", "V=s", "CONFIG_SIGNED_PACKAGES=",
+                                Interpolate("PACKAGE_SUBDIRS=bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1])],
+                       env = MakeEnv(),
+                       haltOnFailure = True
+               ))
+
        # sign
-       if gpg_keyid is not None:
+       if ini.has_option("gpg", "key") or usign_key is not None:
                factory.addStep(MasterShellCommand(
                        name = "signprepare",
                        description = "Preparing temporary signing directory",
-                       command = ["mkdir", "-p", "%s/signing" %(home_dir)],
+                       command = ["mkdir", "-p", "%s/signing" %(work_dir)],
                        haltOnFailure = True
                ))
 
                factory.addStep(ShellCommand(
                        name = "signpack",
                        description = "Packing files to sign",
-                       command = WithProperties("find bin/targets/%s/%s%%(libc)s/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(ts[0], ts[1])),
+                       command = Interpolate("find bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/ bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/ -mindepth 1 -maxdepth 2 -type f -name sha256sums -print0 -or -name Packages -print0 | xargs -0 tar -czf sign.tar.gz", target=ts[0], subtarget=ts[1]),
                        haltOnFailure = True
                ))
 
                factory.addStep(FileUpload(
-                       slavesrc = "sign.tar.gz",
-                       masterdest = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
+                       workersrc = "sign.tar.gz",
+                       masterdest = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
                        haltOnFailure = True
                ))
 
                factory.addStep(MasterShellCommand(
                        name = "signfiles",
                        description = "Signing files",
-                       command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]), gpg_keyid, gpg_comment],
-                       env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile},
+                       command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1])],
+                       env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
                        haltOnFailure = True
                ))
 
                factory.addStep(FileDownload(
-                       mastersrc = "%s/signing/%s.%s.tar.gz" %(home_dir, ts[0], ts[1]),
-                       slavedest = "sign.tar.gz",
+                       name = "dlsigntargz",
+                       mastersrc = "%s/signing/%s.%s.tar.gz" %(work_dir, ts[0], ts[1]),
+                       workerdest = "sign.tar.gz",
                        haltOnFailure = True
                ))
 
@@ -696,56 +1137,131 @@ for target in targets:
        factory.addStep(ShellCommand(
                name = "dirprepare",
                description = "Preparing upload directory structure",
-               command = ["mkdir", "-p", WithProperties("tmp/upload/%%(prefix)stargets/%s/%s" %(ts[0], ts[1]), prefix=GetVersionPrefix)],
+               command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
                haltOnFailure = True
        ))
 
        factory.addStep(ShellCommand(
                name = "linkprepare",
                description = "Preparing repository symlink",
-               command = ["ln", "-s", "-f", WithProperties("../packages-%(basever)s", basever=GetBaseVersion), WithProperties("tmp/upload/%(prefix)spackages", prefix=GetVersionPrefix)],
+               command = ["ln", "-s", "-f", Interpolate("../packages-%(kw:basever)s", basever=GetBaseVersion()), Interpolate("tmp/upload/%(kw:prefix)spackages", prefix=GetVersionPrefix)],
                doStepIf = IsNoMasterBuild,
                haltOnFailure = True
        ))
 
+       if enable_kmod_archive:
+               factory.addStep(ShellCommand(
+                       name = "kmoddirprepare",
+                       description = "Preparing kmod archive upload directory",
+                       command = ["mkdir", "-p", Interpolate("tmp/upload/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s", target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
+                       haltOnFailure = True
+               ))
+
        factory.addStep(ShellCommand(
                name = "dirupload",
                description = "Uploading directory structure",
-               command = ["rsync", "-4", "-avz", "tmp/upload/", "%s/" %(rsync_bin_url)],
+               command = ["rsync", "-az"] + rsync_bin_defopts + ["tmp/upload/", "%s/" %(rsync_bin_url)],
                env={'RSYNC_PASSWORD': rsync_bin_key},
                haltOnFailure = True,
-               logEnviron = False
+               logEnviron = False,
+       ))
+
+       # download remote sha256sums to 'target-sha256sums'
+       factory.addStep(ShellCommand(
+               name = "target-sha256sums",
+               description = "Fetching remote sha256sums for target",
+               command = ["rsync", "-z"] + rsync_bin_defopts + [Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/sha256sums", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix), "target-sha256sums"],
+               env={'RSYNC_PASSWORD': rsync_bin_key},
+               logEnviron = False,
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = False,
+       ))
+
+       # build list of files to upload
+       factory.addStep(FileDownload(
+               name = "dlsha2rsyncpl",
+               mastersrc = scripts_dir + '/sha2rsync.pl',
+               workerdest = "../sha2rsync.pl",
+               mode = 0o755,
+       ))
+
+       factory.addStep(ShellCommand(
+               name = "buildlist",
+               description = "Building list of files to upload",
+               command = ["../sha2rsync.pl", "target-sha256sums", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/sha256sums", target=ts[0], subtarget=ts[1]), "rsynclist"],
+               haltOnFailure = True,
+       ))
+
+       factory.addStep(FileDownload(
+               name = "dlrsync.sh",
+               mastersrc = scripts_dir + '/rsync.sh',
+               workerdest = "../rsync.sh",
+               mode = 0o755
        ))
 
+       # upload new files and update existing ones
        factory.addStep(ShellCommand(
                name = "targetupload",
                description = "Uploading target files",
-               command=["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]),
-                        "-avz", WithProperties("bin/targets/%s/%s%%(libc)s/" %(ts[0], ts[1])),
-                        WithProperties("%s/%%(prefix)stargets/%s/%s/" %(rsync_bin_url, ts[0], ts[1]), prefix=GetVersionPrefix)],
+               command=["../rsync.sh", "--exclude=/kmods/", "--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
+                        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
+                        Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
                env={'RSYNC_PASSWORD': rsync_bin_key},
                haltOnFailure = True,
-               logEnviron = False
+               logEnviron = False,
+       ))
+
+       # delete files which don't exist locally
+       factory.addStep(ShellCommand(
+               name = "targetprune",
+               description = "Pruning target files",
+               command=["../rsync.sh", "--exclude=/kmods/", "--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
+                        ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/", target=ts[0], subtarget=ts[1]),
+                        Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
+               env={'RSYNC_PASSWORD': rsync_bin_key},
+               haltOnFailure = True,
+               logEnviron = False,
        ))
 
+       if enable_kmod_archive:
+               factory.addStep(ShellCommand(
+                       name = "kmodupload",
+                       description = "Uploading kmod archive",
+                       command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1])] + rsync_bin_defopts +
+                                ["-a", Interpolate("bin/targets/%(kw:target)s/%(kw:subtarget)s%(prop:libc)s/kmods/%(prop:kernelversion)s/", target=ts[0], subtarget=ts[1]),
+                                Interpolate("%(kw:rsyncbinurl)s/%(kw:prefix)stargets/%(kw:target)s/%(kw:subtarget)s/kmods/%(prop:kernelversion)s/", rsyncbinurl=rsync_bin_url, target=ts[0], subtarget=ts[1], prefix=GetVersionPrefix)],
+                       env={'RSYNC_PASSWORD': rsync_bin_key},
+                       haltOnFailure = True,
+                       logEnviron = False,
+               ))
+
        if rsync_src_url is not None:
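+               # source archives to upload: regular, non-empty, non-hidden files at
+               # the top level of dl/ that are newer than this build's .config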
+               factory.addStep(ShellCommand(
+                       name = "sourcelist",
+                       description = "Finding source archives to upload",
+                       command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer .config -printf '%f\\n' > sourcelist",
+                       haltOnFailure = True,
+               ))
+
                factory.addStep(ShellCommand(
                        name = "sourceupload",
                        description = "Uploading source archives",
-                       command=["rsync", "-4", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-avz", "dl/", "%s/" %(rsync_src_url)],
+                       command=["../rsync.sh", "--files-from=sourcelist", "--size-only", "--delay-updates"] + rsync_src_defopts +
+                                [Interpolate("--partial-dir=.~tmp~%(kw:target)s~%(kw:subtarget)s~%(prop:workername)s", target=ts[0], subtarget=ts[1]), "-a", "dl/", "%s/" %(rsync_src_url)],
                        env={'RSYNC_PASSWORD': rsync_src_key},
                        haltOnFailure = True,
-                       logEnviron = False
+                       logEnviron = False,
                ))
 
        if False:
                factory.addStep(ShellCommand(
                        name = "packageupload",
                        description = "Uploading package files",
-                       command=["rsync", "-4", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-avz", "bin/packages/", "%s/packages/" %(rsync_bin_url)],
+                       command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-a"] + rsync_bin_defopts + ["bin/packages/", "%s/packages/" %(rsync_bin_url)],
                        env={'RSYNC_PASSWORD': rsync_bin_key},
                        haltOnFailure = False,
-                       logEnviron = False
+                       logEnviron = False,
                ))
 
        # logs
@@ -753,16 +1269,44 @@ for target in targets:
                factory.addStep(ShellCommand(
                        name = "upload",
                        description = "Uploading logs",
-                       command=["rsync", "-4", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-avz", "logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
+                       command=["../rsync.sh", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s~%s" %(ts[0], ts[1]), "-az"] + rsync_bin_defopts + ["logs/", "%s/logs/%s/%s/" %(rsync_bin_url, ts[0], ts[1])],
                        env={'RSYNC_PASSWORD': rsync_bin_key},
                        haltOnFailure = False,
                        alwaysRun = True,
-                       logEnviron = False
+                       logEnviron = False,
                ))
 
-       from buildbot.config import BuilderConfig
+       factory.addStep(ShellCommand(
+               name = "df",
+               description = "Reporting disk usage",
+               command=["df", "-h", "."],
+               env={'LC_ALL': 'C'},
+               haltOnFailure = False,
+               alwaysRun = True,
+       ))
+
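+       # ccache may only exist as the in-tree host tool, hence the PATH override
+       # pointing at ./staging_dir/host/bin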
+       factory.addStep(ShellCommand(
+               name = "ccachestat",
+               description = "Reporting ccache stats",
+               command=["ccache", "-s"],
+               env = MakeEnv(overrides={ 'PATH': ["${PATH}", "./staging_dir/host/bin"] }),
+               want_stderr = False,
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = False,
+               alwaysRun = True,
+       ))
+
+       c['builders'].append(BuilderConfig(name=target, workernames=slaveNames, factory=factory, nextBuild=GetNextBuild))
 
-       c['builders'].append(BuilderConfig(name=target, slavenames=slaveNames, factory=factory, nextBuild=GetNextBuild))
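+       # one Triggerable scheduler per target; the shared force factory gains a
+       # matching Trigger step gated by IsTargetSelected(), so a forced build
+       # fans out only to the targets that were selected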
+       c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % target, builderNames=[ target ]))
+       force_factory.addStep(steps.Trigger(
+               name = "trigger_%s" % target,
+               description = "Triggering %s build" % target,
+               schedulerNames = [ "trigger_%s" % target ],
+               set_properties = { "reason": Property("reason"), "tag": TagPropertyValue },
+               doStepIf = IsTargetSelected(target),
+       ))
 
 
 ####### STATUS TARGETS
@@ -771,31 +1315,25 @@ for target in targets:
 # pushed to these targets. buildbot/status/*.py has a variety to choose from,
 # including web pages, email senders, and IRC bots.
 
-c['status'] = []
-
-from buildbot.status import html
-from buildbot.status.web import authz, auth
-
-if ini.has_option("status", "bind"):
-       if ini.has_option("status", "user") and ini.has_option("status", "password"):
-               authz_cfg=authz.Authz(
-                       # change any of these to True to enable; see the manual for more
-                       # options
-                       auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]),
-                       gracefulShutdown = 'auth',
-                       forceBuild = 'auth', # use this to test your slave once it is set up
-                       forceAllBuilds = 'auth',
-                       pingBuilder = False,
-                       stopBuild = 'auth',
-                       stopAllBuilds = 'auth',
-                       cancelPendingBuild = 'auth',
-               )
-               c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg))
-       else:
-               c['status'].append(html.WebStatus(http_port=ini.get("status", "bind")))
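+# example config.ini stanza enabling the web UI (option names as read below,
+# values purely hypothetical):
+#
+#   [phase1]
+#   status_bind = tcp:8010
+#   status_user = admin
+#   status_password = secret
+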
+if ini.has_option("phase1", "status_bind"):
+       c['www'] = {
+               'port': ini.get("phase1", "status_bind"),
+               'plugins': {
+                       'waterfall_view': True,
+                       'console_view': True,
+                       'grid_view': True
+               }
+       }
 
+       if ini.has_option("phase1", "status_user") and ini.has_option("phase1", "status_password"):
+               c['www']['auth'] = util.UserPasswordAuth([
+                       (ini.get("phase1", "status_user"), ini.get("phase1", "status_password"))
+               ])
+               c['www']['authz'] = util.Authz(
+                       allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
+                       roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase1", "status_user")]) ]
+               )
 
-from buildbot.status import words
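+# example [irc] stanza (hypothetical values):
+#
+#   [irc]
+#   host = irc.example.org
+#   nickname = phase1-builder
+#   channel = #builds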
 
 if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.has_option("irc", "channel"):
        irc_host = ini.get("irc", "host")
@@ -810,34 +1348,14 @@ if ini.has_option("irc", "host") and ini.has_option("irc", "nickname") and ini.h
        if ini.has_option("irc", "password"):
                irc_pass = ini.get("irc", "password")
 
-       irc = words.IRC(irc_host, irc_nick, port = irc_port, password = irc_pass,
-                       channels = [{ "channel": irc_chan }],
-                       notify_events = {
-                         'exception': 1,
-                         'successToFailure': 1,
-                         'failureToSuccess': 1
-                       }
+       irc = reporters.IRC(irc_host, irc_nick,
+               port = irc_port,
+               password = irc_pass,
+               channels = [ irc_chan ],
+               notify_events = [ 'exception', 'problem', 'recovery' ]
        )
 
-       c['status'].append(irc)
-
-
-####### PROJECT IDENTITY
-
-# the 'title' string will appear at the top of this buildbot
-# installation's html.WebStatus home page (linked to the
-# 'titleURL') and is embedded in the title of the waterfall HTML page.
-
-c['title'] = ini.get("general", "title")
-c['titleURL'] = ini.get("general", "title_url")
-
-# the 'buildbotURL' string should point to the location where the buildbot's
-# internal web server (usually the html.WebStatus page) is visible. This
-# typically uses the port number set in the Waterfall 'status' entry, but
-# with an externally-visible host name which the buildbot cannot figure out
-# without some help.
-
-c['buildbotURL'] = ini.get("general", "buildbot_url")
+       c['services'].append(irc)
 
 ####### DB URL
 
@@ -846,3 +1364,5 @@ c['db'] = {
        # this at its default for all but the largest installations.
        'db_url' : "sqlite:///state.sqlite",
 }
+
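+# opt out of sending anonymous usage statistics to buildbot.net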
+c['buildbotNetUsageData'] = None