phase1,phase2: s/master/main for phase{1,2}
diff --git a/phase2/master.cfg b/phase2/master.cfg
index d1513c3d0ab85fe611f22fad1171dfd078ad96a3..c399c66eb0fc9467e250e85d6b2652d99388abab 100644
--- a/phase2/master.cfg
+++ b/phase2/master.cfg
@@ -3,26 +3,34 @@
 
 import os
 import re
+import sys
 import base64
 import subprocess
 import configparser
 
-from datetime import timedelta
+from dateutil.tz import tzutc
+from datetime import datetime, timedelta
+
+from twisted.internet import defer
+from twisted.python import log
 
 from buildbot import locks
+from buildbot.data import resultspec
 from buildbot.changes import filter
 from buildbot.changes.gitpoller import GitPoller
 from buildbot.config import BuilderConfig
 from buildbot.plugins import schedulers
 from buildbot.plugins import steps
 from buildbot.plugins import util
+from buildbot.process import results
 from buildbot.process.factory import BuildFactory
 from buildbot.process.properties import Property
-from buildbot.process.properties import WithProperties
+from buildbot.process.properties import Interpolate
+from buildbot.process import properties
 from buildbot.schedulers.basic import SingleBranchScheduler
 from buildbot.schedulers.forcesched import ForceScheduler
 from buildbot.steps.master import MasterShellCommand
-from buildbot.steps.shell import SetProperty
+from buildbot.steps.shell import SetPropertyFromCommand
 from buildbot.steps.shell import ShellCommand
 from buildbot.steps.transfer import FileDownload
 from buildbot.steps.transfer import FileUpload
@@ -30,6 +38,10 @@ from buildbot.steps.transfer import StringDownload
 from buildbot.worker import Worker
 
 
+if not os.path.exists("twistd.pid"):
+    with open("twistd.pid", "w") as pidfile:
+        pidfile.write("{}".format(os.getpid()))
+
 ini = configparser.ConfigParser()
 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
 
@@ -42,68 +54,40 @@ buildbot_url = ini.get("phase2", "buildbot_url")
 # a shorter alias to save typing.
 c = BuildmasterConfig = {}
 
-####### BUILDSLAVES
+####### BUILDWORKERS
 
-# The 'workers' list defines the set of recognized buildslaves. Each element is
-# a Worker object, specifying a unique slave name and password.  The same
-# slave name and password must be configured on the slave.
+# The 'workers' list defines the set of recognized buildworkers. Each element is
+# a Worker object, specifying a unique worker name and password.  The same
+# worker name and password must be configured on the worker.
 
-slave_port = 9990
+worker_port = 9990
 persistent = False
-other_builds = 0
-tree_expire = 0
-git_ssh = False
-git_ssh_key = None
 
 if ini.has_option("phase2", "port"):
-       slave_port = ini.get("phase2", "port")
+       worker_port = ini.get("phase2", "port")
 
 if ini.has_option("phase2", "persistent"):
        persistent = ini.getboolean("phase2", "persistent")
 
-if ini.has_option("phase2", "other_builds"):
-       other_builds = ini.getint("phase2", "other_builds")
-
-if ini.has_option("phase2", "expire"):
-       tree_expire = ini.getint("phase2", "expire")
-
-if ini.has_option("general", "git_ssh"):
-       git_ssh = ini.getboolean("general", "git_ssh")
-
-if ini.has_option("general", "git_ssh_key"):
-       git_ssh_key = ini.get("general", "git_ssh_key")
-else:
-       git_ssh = False
-
 c['workers'] = []
-max_builds = dict()
 
 for section in ini.sections():
-       if section.startswith("slave "):
+       if section.startswith("worker "):
                if ini.has_option(section, "name") and ini.has_option(section, "password") and \
-                  ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
+                       ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
                        name = ini.get(section, "name")
                        password = ini.get(section, "password")
-                       sl_props = { 'shared_wd': False }
-                       max_builds[name] = 1
-
-                       if ini.has_option(section, "builds"):
-                               max_builds[name] = ini.getint(section, "builds")
-
-                       if max_builds[name] == 1:
-                               sl_props['shared_wd'] = True
+                       sl_props = { 'shared_wd': True }
 
                        if ini.has_option(section, "shared_wd"):
                                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
-                               if sl_props['shared_wd'] and (max_builds != 1):
-                                       raise ValueError('max_builds must be 1 with shared workdir!')
 
-                       c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
+                       c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))
 
-# 'slavePortnum' defines the TCP port to listen on for connections from workers.
-# This must match the value configured into the buildslaves (with their
+# 'workerPortnum' defines the TCP port to listen on for connections from workers.
+# This must match the value configured into the buildworkers (with their
 # --master option)
-c['protocols'] = {'pb': {'port': slave_port}}
+c['protocols'] = {'pb': {'port': worker_port}}
 
 # coalesce builds
 c['collapseRequests'] = True
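
For illustration only, a minimal sketch of the kind of "worker" section the loop above consumes; the section title, option values and the use of read_string() are placeholders inferred from the code, not taken from a real config.ini:

import configparser

ini = configparser.ConfigParser()
ini.read_string("""
[worker 1]
name = example-worker
password = example-password
phase = 2
shared_wd = yes
""")

for section in ini.sections():
    if section.startswith("worker ") and ini.getint(section, "phase") == 2:
        # mirrors the registration above: Worker(name, password, max_builds=1, properties={'shared_wd': ...})
        print(ini.get(section, "name"), ini.getboolean(section, "shared_wd"))
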
@@ -131,7 +115,7 @@ if ini.has_option("rsync", "source_url"):
 
 rsync_sdk_url = None
 rsync_sdk_key = None
-rsync_sdk_pat = "openwrt-sdk-*.tar.xz"
+rsync_sdk_pat = "openwrt-sdk-*.tar.*"
 
 if ini.has_option("rsync", "sdk_url"):
        rsync_sdk_url = ini.get("rsync", "sdk_url")
@@ -142,8 +126,10 @@ if ini.has_option("rsync", "sdk_password"):
 if ini.has_option("rsync", "sdk_pattern"):
        rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
 
+rsync_defopts = ["-4", "-v", "--timeout=120"]
+
 repo_url = ini.get("repo", "url")
-repo_branch = "master"
+repo_branch = "main"
 
 if ini.has_option("repo", "branch"):
        repo_branch = ini.get("repo", "branch")
@@ -188,10 +174,10 @@ c['change_source'] = []
 
 def parse_feed_entry(line):
        parts = line.strip().split()
-       if parts[0] == "src-git":
+       if parts[0].startswith("src-git"):
                feeds.append(parts)
                url = parts[2].strip().split(';')
-               branch = url[1] if len(url) > 1 else 'master'
+               branch = url[1] if len(url) > 1 else 'main'
                feedbranches[url[0]] = branch
                c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
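
As a worked example (the feed URL is illustrative), this is what parse_feed_entry() extracts from a typical feeds.conf.default line:

line = "src-git packages https://git.openwrt.org/feed/packages.git;openwrt-21.02"
parts = line.strip().split()       # ['src-git', 'packages', 'https://git.openwrt.org/feed/packages.git;openwrt-21.02']
url = parts[2].strip().split(';')  # feed URL and optional branch, separated by ';'
branch = url[1] if len(url) > 1 else 'main'
print(url[0], branch)              # https://git.openwrt.org/feed/packages.git openwrt-21.02
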
 
@@ -200,12 +186,15 @@ make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source
 
 line = make.stdout.readline()
 if line:
-       parse_feed_entry(line)
+       parse_feed_entry(str(line, 'utf-8'))
 
-with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
+with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
        for line in f:
                parse_feed_entry(line)
 
+if len(c['change_source']) == 0:
+       log.err("FATAL ERROR: no change_sources defined, aborting!")
+       sys.exit(-1)
 
 ####### SCHEDULERS
 
@@ -267,8 +256,9 @@ c['schedulers'].append(ForceScheduler(
 
 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
 # what steps, and which workers can execute them.  Note that any particular build will
-# only take place on one slave.
+# only take place on one worker.
 
+@properties.renderer
 def GetDirectorySuffix(props):
        verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
        if props.hasProperty("release_version"):
@@ -277,12 +267,7 @@ def GetDirectorySuffix(props):
                        return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
        return ""
 
-def GetNumJobs(props):
-       if props.hasProperty("workername") and props.hasProperty("nproc"):
-               return ((int(props["nproc"]) / (max_builds[props["workername"]] + other_builds)) + 1)
-       else:
-               return 1
-
+@properties.renderer
 def GetCwd(props):
        if props.hasProperty("builddir"):
                return props["builddir"]
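
A few sample inputs make the GetDirectorySuffix renderer above concrete; the version strings are examples only:

import re

verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
for version in ("21.02.3", "21.02.0-rc4", "21.02-SNAPSHOT", "SNAPSHOT"):
    m = verpat.match(version)
    suffix = "-%02d.%02d" % (int(m.group(1)), int(m.group(2))) if m else ""
    print(version, "->", repr(suffix))
# "21.02.3", "21.02.0-rc4" and "21.02-SNAPSHOT" all yield "-21.02"; a bare "SNAPSHOT" yields ""
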
@@ -295,7 +280,7 @@ def IsArchitectureSelected(target):
        def CheckArchitectureProperty(step):
                try:
                        options = step.getProperty("options")
-                       if type(options) is dict:
+                       if isinstance(options, dict):
                                selected_arch = options.get("architecture", "all")
                                if selected_arch != "all" and selected_arch != target:
                                        return False
@@ -309,7 +294,7 @@ def IsArchitectureSelected(target):
 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
        try:
                seckey = base64.b64decode(seckey)
-       except:
+       except Exception:
                return None
 
        return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
@@ -318,21 +303,96 @@ def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
 def IsSharedWorkdir(step):
        return bool(step.getProperty("shared_wd"))
 
+@defer.inlineCallbacks
+def getNewestCompleteTime(bldr):
+       """Returns the complete_at of the latest completed and not SKIPPED
+       build request for this builder, or None if there are no such build
+       requests. We need to filter out SKIPPED requests because we're
+       using collapseRequests=True which is unfortunately marking all
+       previous requests as complete when new buildset is created.
+
+       @returns: datetime instance or None, via Deferred
+       """
+
+       bldrid = yield bldr.getBuilderId()
+       completed = yield bldr.master.data.get(
+                       ('builders', bldrid, 'buildrequests'),
+                       [
+                               resultspec.Filter('complete', 'eq', [True]),
+                               resultspec.Filter('results', 'ne', [results.SKIPPED]),
+                       ],
+                       order=['-complete_at'], limit=1)
+       if not completed:
+               return
+
+       complete_at = completed[0]['complete_at']
+
+       last_build = yield bldr.master.data.get(
+                       ('builds', ),
+                       [
+                               resultspec.Filter('builderid', 'eq', [bldrid]),
+                       ],
+                       order=['-started_at'], limit=1)
+
+       if last_build and last_build[0]:
+               last_complete_at = last_build[0]['complete_at']
+               if last_complete_at and (last_complete_at > complete_at):
+                       return last_complete_at
+
+       return complete_at
+
+@defer.inlineCallbacks
+def prioritizeBuilders(master, builders):
+       """Returns sorted list of builders by their last timestamp of completed and
+       not skipped build.
+
+       @returns: list of sorted builders
+       """
+
+       def is_building(bldr):
+               return bool(bldr.building) or bool(bldr.old_building)
+
+       def bldr_info(bldr):
+               d = defer.maybeDeferred(getNewestCompleteTime, bldr)
+               d.addCallback(lambda complete_at: (complete_at, bldr))
+               return d
+
+       def bldr_sort(item):
+               (complete_at, bldr) = item
 
+               if not complete_at:
+                       date = datetime.min
+                       complete_at = date.replace(tzinfo=tzutc())
+
+               if is_building(bldr):
+                       date = datetime.max
+                       complete_at = date.replace(tzinfo=tzutc())
+
+               return (complete_at, bldr.name)
+
+       results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
+       results.sort(key=bldr_sort)
+
+       for r in results:
+               log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
+
+       return [r[1] for r in results]
+
+c['prioritizeBuilders'] = prioritizeBuilders
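
A toy illustration of the resulting order (builder names and dates are invented): builders that never completed a build sort first via datetime.min, builders currently building sort last via datetime.max, and everything else is ordered oldest completion first:

from datetime import datetime
from dateutil.tz import tzutc

entries = [
    (datetime(2021, 6, 1, tzinfo=tzutc()), "mips_24kc"),      # completed most recently
    (datetime.min.replace(tzinfo=tzutc()), "arm_cortex-a9"),  # never completed a build
    (datetime(2021, 5, 1, tzinfo=tzutc()), "x86_64"),         # idle the longest
]
entries.sort()  # same rule as bldr_sort: oldest complete_at gets priority
print([name for _, name in entries])  # ['arm_cortex-a9', 'x86_64', 'mips_24kc']
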
 c['builders'] = []
 
-dlLock = locks.WorkerLock("slave_dl")
+dlLock = locks.WorkerLock("worker_dl")
 
-slaveNames = [ ]
+workerNames = [ ]
 
-for slave in c['workers']:
-       slaveNames.append(slave.workername)
+for worker in c['workers']:
+       workerNames.append(worker.workername)
 
 force_factory = BuildFactory()
 
 c['builders'].append(BuilderConfig(
        name        = "00_force_build",
-       workernames = slaveNames,
+       workernames = workerNames,
        factory     = force_factory))
 
 for arch in arches:
@@ -350,7 +410,7 @@ for arch in arches:
                doStepIf = IsSharedWorkdir))
 
        # find number of cores
-       factory.addStep(SetProperty(
+       factory.addStep(SetPropertyFromCommand(
                name = "nproc",
                property = "nproc",
                description = "Finding number of CPUs",
@@ -366,7 +426,7 @@ for arch in arches:
                factory.addStep(ShellCommand(
                        name = "cleanold",
                        description = "Cleaning previous builds",
-                       command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
+                       command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
                        workdir = ".",
                        haltOnFailure = True,
                        timeout = 2400))
@@ -374,22 +434,7 @@ for arch in arches:
                factory.addStep(ShellCommand(
                        name = "cleanup",
                        description = "Cleaning work area",
-                       command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
-                       workdir = ".",
-                       haltOnFailure = True,
-                       timeout = 2400))
-
-       # expire tree if needed
-       elif tree_expire > 0:
-               factory.addStep(FileDownload(
-                       mastersrc = scripts_dir + '/expire.sh',
-                       workerdest = "../expire.sh",
-                       mode = 0o755))
-
-               factory.addStep(ShellCommand(
-                       name = "expire",
-                       description = "Checking for build tree expiry",
-                       command = ["./expire.sh", str(tree_expire)],
+                       command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
                        workdir = ".",
                        haltOnFailure = True,
                        timeout = 2400))
@@ -403,7 +448,7 @@ for arch in arches:
        factory.addStep(ShellCommand(
                name = "downloadsdk",
                description = "Downloading SDK archive",
-               command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
+               command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
                env={'RSYNC_PASSWORD': rsync_sdk_key},
                haltOnFailure = True,
                logEnviron = False))
@@ -417,7 +462,7 @@ for arch in arches:
        factory.addStep(ShellCommand(
                name = "updatesdk",
                description = "Updating SDK",
-               command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
+               command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
                haltOnFailure = True))
 
        factory.addStep(ShellCommand(
@@ -432,7 +477,7 @@ for arch in arches:
                workerdest = "sdk/getversion.mk",
                mode = 0o755))
 
-       factory.addStep(SetProperty(
+       factory.addStep(SetPropertyFromCommand(
                name = "getversion",
                property = "release_version",
                description = "Finding SDK release version",
@@ -483,43 +528,13 @@ for arch in arches:
                command = ["./ccache.sh"],
                haltOnFailure = True))
 
-       factory.addStep(ShellCommand(
-               name = "patchfeedsconfgitfull",
-               description = "Patching feeds.conf to use src-git-full",
-               workdir = "build/sdk",
-               command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
-               haltOnFailure = True))
-
-       if git_ssh:
-               factory.addStep(StringDownload(
-                       name = "dlgitclonekey",
-                       s = git_ssh_key,
-                       workerdest = "../git-clone.key",
-                       mode = 0o600))
-
-               factory.addStep(ShellCommand(
-                       name = "patchfeedsconf",
-                       description = "Patching feeds.conf to use SSH cloning",
-                       workdir = "build/sdk",
-                       command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
-                       haltOnFailure = True))
-
        factory.addStep(ShellCommand(
                name = "updatefeeds",
                description = "Updating feeds",
                workdir = "build/sdk",
                command = ["./scripts/feeds", "update", "-f"],
-               env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
                haltOnFailure = True))
 
-       if git_ssh:
-               factory.addStep(ShellCommand(
-                       name = "rmfeedsconf",
-                       description = "Removing feeds.conf",
-                       workdir = "build/sdk",
-                       command=["rm", "feeds.conf"],
-                       haltOnFailure = True))
-
        factory.addStep(ShellCommand(
                name = "installfeeds",
                description = "Installing feeds",
@@ -532,7 +547,9 @@ for arch in arches:
                description = "Clearing failure logs",
                workdir = "build/sdk",
                command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
-               haltOnFailure = False
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = True,
        ))
 
        factory.addStep(ShellCommand(
@@ -540,8 +557,8 @@ for arch in arches:
                description = "Building packages",
                workdir = "build/sdk",
                timeout = 3600,
-               command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
-               env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
+               command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
+               env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
                haltOnFailure = True))
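
The WithProperties to Interpolate replacements above follow buildbot's current interpolation syntax; a short comparison sketch (the step values are placeholders):

from buildbot.process.properties import Interpolate

# property lookup:   WithProperties("%(buildername)s")     ->  Interpolate("%(prop:buildername)s")
# keyword argument:  WithProperties("%(cwd)s", cwd=GetCwd)  ->  Interpolate("%(kw:cwd)s", cwd=GetCwd)
# default value:     Interpolate("-j%(prop:nproc:-1)s") expands to "-j1" when the 'nproc' property is unset
make_jobs = Interpolate("-j%(prop:nproc:-1)s")
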
 
        factory.addStep(ShellCommand(
@@ -550,6 +567,18 @@ for arch in arches:
                workdir = "build/sdk",
                command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
 
+       factory.addStep(ShellCommand(
+               name = "checksums",
+               description = "Calculating checksums",
+               descriptionDone="Checksums calculated",
+               workdir = "build/sdk",
+               command = "cd bin/packages/%s; " %(arch[0])
+               + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | "
+               + "sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | "
+               + r"sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums",
+               haltOnFailure = True
+       ))
+
        if ini.has_option("gpg", "key") or usign_key is not None:
                factory.addStep(MasterShellCommand(
                        name = "signprepare",
@@ -594,11 +623,45 @@ for arch in arches:
                        haltOnFailure = True
                ))
 
+       # download remote sha256sums to 'target-sha256sums'
+       factory.addStep(ShellCommand(
+               name = "target-sha256sums",
+               description = "Fetching remote sha256sums for arch",
+               command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
+               env={'RSYNC_PASSWORD': rsync_bin_key},
+               logEnviron = False,
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = False,
+       ))
+
+       factory.addStep(FileDownload(
+               name="dlrsync.sh",
+               mastersrc = scripts_dir + "/rsync.sh",
+               workerdest = "../rsync.sh",
+               mode = 0o755
+       ))
+
+       factory.addStep(FileDownload(
+               name = "dlsha2rsyncpl",
+               mastersrc = scripts_dir + "/sha2rsync.pl",
+               workerdest = "../sha2rsync.pl",
+               mode = 0o755,
+       ))
+
+       factory.addStep(ShellCommand(
+               name = "buildlist",
+               description = "Building list of files to upload",
+               workdir = "build/sdk",
+               command = ["../../sha2rsync.pl", "../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
+               haltOnFailure = True,
+       ))
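
The buildlist step hands sha2rsync.pl the remote checksums fetched into 'arch-sha256sums' and the freshly generated local sha256sums, and writes the files to transfer to 'rsynclist', which the packageupload step below passes to rsync via --files-from. A rough Python sketch of that idea (the actual logic lives in sha2rsync.pl, which is not shown here, so treat this as an approximation):

def parse_sums(path):
    # checksum lines look like "<sha256> *<relative/path>", as produced by the checksums step above
    sums = {}
    with open(path, encoding='utf-8') as f:
        for line in f:
            digest, name = line.rstrip('\n').split(' *', 1)
            sums[name] = digest
    return sums

remote = parse_sums('arch-sha256sums')                # may be missing or empty on a first run
local = parse_sums('bin/packages/x86_64/sha256sums')  # 'x86_64' stands in for arch[0]
upload = [name for name, digest in local.items() if remote.get(name) != digest]
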
+
        factory.addStep(ShellCommand(
                name = "uploadprepare",
                description = "Preparing package directory",
                workdir = "build/sdk",
-               command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
+               command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
                env={'RSYNC_PASSWORD': rsync_bin_key},
                haltOnFailure = True,
                logEnviron = False
@@ -608,7 +671,17 @@ for arch in arches:
                name = "packageupload",
                description = "Uploading package files",
                workdir = "build/sdk",
-               command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
+               command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
+               env={'RSYNC_PASSWORD': rsync_bin_key},
+               haltOnFailure = True,
+               logEnviron = False
+       ))
+
+       factory.addStep(ShellCommand(
+               name = "packageprune",
+               description = "Pruning package files",
+               workdir = "build/sdk",
+               command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
                env={'RSYNC_PASSWORD': rsync_bin_key},
                haltOnFailure = True,
                logEnviron = False
@@ -618,7 +691,7 @@ for arch in arches:
                name = "logprepare",
                description = "Preparing log directory",
                workdir = "build/sdk",
-               command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
+               command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
                env={'RSYNC_PASSWORD': rsync_bin_key},
                haltOnFailure = True,
                logEnviron = False
@@ -629,24 +702,30 @@ for arch in arches:
                description = "Finding failure logs",
                workdir = "build/sdk/logs/package/feeds",
                command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
-               haltOnFailure = False
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = True,
        ))
 
        factory.addStep(ShellCommand(
                name = "logcollect",
                description = "Collecting failure logs",
                workdir = "build/sdk",
-               command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
-               haltOnFailure = False
+               command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = True,
        ))
 
        factory.addStep(ShellCommand(
                name = "logupload",
                description = "Uploading failure logs",
                workdir = "build/sdk",
-               command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
+               command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
                env={'RSYNC_PASSWORD': rsync_bin_key},
                haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = True,
                logEnviron = False
        ))
 
@@ -655,7 +734,7 @@ for arch in arches:
                        name = "sourcelist",
                        description = "Finding source archives to upload",
                        workdir = "build/sdk",
-                       command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
+                       command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
                        haltOnFailure = True
                ))
 
@@ -663,10 +742,12 @@ for arch in arches:
                        name = "sourceupload",
                        description = "Uploading source archives",
                        workdir = "build/sdk",
-                       command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
-                                  WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
+                       command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
+                                       Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
                        env={'RSYNC_PASSWORD': rsync_src_key},
                        haltOnFailure = False,
+                       flunkOnFailure = False,
+                       warnOnFailure = True,
                        logEnviron = False
                ))
 
@@ -676,10 +757,34 @@ for arch in arches:
                command=["df", "-h", "."],
                env={'LC_ALL': 'C'},
                haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = False,
                alwaysRun = True
        ))
 
-       c['builders'].append(BuilderConfig(name=arch[0], workernames=slaveNames, factory=factory))
+       factory.addStep(ShellCommand(
+               name = "du",
+               description = "Reporting estimated file space usage",
+               command=["du", "-sh", "."],
+               env={'LC_ALL': 'C'},
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = False,
+               alwaysRun = True
+       ))
+
+       factory.addStep(ShellCommand(
+               name = "ccachestat",
+               description = "Reporting ccache stats",
+               command=["ccache", "-s"],
+               want_stderr = False,
+               haltOnFailure = False,
+               flunkOnFailure = False,
+               warnOnFailure = False,
+               alwaysRun = True,
+       ))
+
+       c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
 
        c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
        force_factory.addStep(steps.Trigger(