X-Git-Url: http://git.openwrt.org/?a=blobdiff_plain;f=phase2%2Fmaster.cfg;h=c399c66eb0fc9467e250e85d6b2652d99388abab;hb=HEAD;hp=20ff19ed9c2e8a2c7f5a6343c977a70641c67984;hpb=b8c0ac2e1384dbf133a5c1ea479d348ce2a9ee74;p=buildbot.git diff --git a/phase2/master.cfg b/phase2/master.cfg index 20ff19e..c399c66 100644 --- a/phase2/master.cfg +++ b/phase2/master.cfg @@ -3,15 +3,49 @@ import os import re +import sys +import base64 import subprocess -import ConfigParser +import configparser + +from dateutil.tz import tzutc +from datetime import datetime, timedelta + +from twisted.internet import defer +from twisted.python import log from buildbot import locks +from buildbot.data import resultspec +from buildbot.changes import filter +from buildbot.changes.gitpoller import GitPoller +from buildbot.config import BuilderConfig +from buildbot.plugins import schedulers +from buildbot.plugins import steps +from buildbot.plugins import util +from buildbot.process import results +from buildbot.process.factory import BuildFactory +from buildbot.process.properties import Property +from buildbot.process.properties import Interpolate +from buildbot.process import properties +from buildbot.schedulers.basic import SingleBranchScheduler +from buildbot.schedulers.forcesched import ForceScheduler +from buildbot.steps.master import MasterShellCommand +from buildbot.steps.shell import SetPropertyFromCommand +from buildbot.steps.shell import ShellCommand +from buildbot.steps.transfer import FileDownload +from buildbot.steps.transfer import FileUpload +from buildbot.steps.transfer import StringDownload +from buildbot.worker import Worker + -ini = ConfigParser.ConfigParser() -ini.read("./config.ini") +if not os.path.exists("twistd.pid"): + with open("twistd.pid", "w") as pidfile: + pidfile.write("{}".format(os.getpid())) -buildbot_url = ini.get("general", "buildbot_url") +ini = configparser.ConfigParser() +ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini")) + +buildbot_url = ini.get("phase2", "buildbot_url") # This is a sample buildmaster config file. It must be installed as # 'master.cfg' in your buildmaster's base directory. @@ -20,58 +54,54 @@ buildbot_url = ini.get("general", "buildbot_url") # a shorter alias to save typing. c = BuildmasterConfig = {} -####### BUILDSLAVES +####### BUILDWORKERS -# The 'slaves' list defines the set of recognized buildslaves. Each element is -# a BuildSlave object, specifying a unique slave name and password. The same -# slave name and password must be configured on the slave. -from buildbot.buildslave import BuildSlave +# The 'workers' list defines the set of recognized buildworkers. Each element is +# a Worker object, specifying a unique worker name and password. The same +# worker name and password must be configured on the worker. 
-slave_port = 9990 +worker_port = 9990 persistent = False -other_builds = 0 -tree_expire = 0 - -if ini.has_option("general", "port"): - slave_port = ini.getint("general", "port") - -if ini.has_option("general", "persistent"): - persistent = ini.getboolean("general", "persistent") -if ini.has_option("general", "other_builds"): - other_builds = ini.getint("general", "other_builds") +if ini.has_option("phase2", "port"): + worker_port = ini.get("phase2", "port") -if ini.has_option("general", "expire"): - tree_expire = ini.getint("general", "expire") +if ini.has_option("phase2", "persistent"): + persistent = ini.getboolean("phase2", "persistent") -c['slaves'] = [] -max_builds = dict() +c['workers'] = [] for section in ini.sections(): - if section.startswith("slave "): - if ini.has_option(section, "name") and ini.has_option(section, "password"): + if section.startswith("worker "): + if ini.has_option(section, "name") and ini.has_option(section, "password") and \ + ini.has_option(section, "phase") and ini.getint(section, "phase") == 2: name = ini.get(section, "name") password = ini.get(section, "password") - max_builds[name] = 1 - if ini.has_option(section, "builds"): - max_builds[name] = ini.getint(section, "builds") - c['slaves'].append(BuildSlave(name, password, max_builds = max_builds[name])) + sl_props = { 'shared_wd': True } -# 'slavePortnum' defines the TCP port to listen on for connections from slaves. -# This must match the value configured into the buildslaves (with their + if ini.has_option(section, "shared_wd"): + sl_props['shared_wd'] = ini.getboolean(section, "shared_wd") + + c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props)) + +# 'workerPortnum' defines the TCP port to listen on for connections from workers. +# This must match the value configured into the buildworkers (with their # --master option) -c['slavePortnum'] = slave_port +c['protocols'] = {'pb': {'port': worker_port}} # coalesce builds -c['mergeRequests'] = True +c['collapseRequests'] = True # Reduce amount of backlog data -c['buildHorizon'] = 30 -c['logHorizon'] = 20 +c['configurators'] = [util.JanitorConfigurator( + logHorizon=timedelta(days=3), + hour=6, +)] ####### CHANGESOURCES -home_dir = os.path.abspath(ini.get("general", "homedir")) +work_dir = os.path.abspath(ini.get("general", "workdir") or ".") +scripts_dir = os.path.abspath("../scripts") rsync_bin_url = ini.get("rsync", "binary_url") rsync_bin_key = ini.get("rsync", "binary_password") @@ -85,7 +115,7 @@ if ini.has_option("rsync", "source_url"): rsync_sdk_url = None rsync_sdk_key = None -rsync_sdk_pat = "lede-sdk-*.tar.xz" +rsync_sdk_pat = "openwrt-sdk-*.tar.*" if ini.has_option("rsync", "sdk_url"): rsync_sdk_url = ini.get("rsync", "sdk_url") @@ -96,36 +126,42 @@ if ini.has_option("rsync", "sdk_password"): if ini.has_option("rsync", "sdk_pattern"): rsync_sdk_pat = ini.get("rsync", "sdk_pattern") -gpg_home = "~/.gnupg" -gpg_keyid = None -gpg_comment = "Unattended build signature" -gpg_passfile = "/dev/null" +rsync_defopts = ["-4", "-v", "--timeout=120"] -if ini.has_option("gpg", "home"): - gpg_home = ini.get("gpg", "home") +repo_url = ini.get("repo", "url") +repo_branch = "main" -if ini.has_option("gpg", "keyid"): - gpg_keyid = ini.get("gpg", "keyid") +if ini.has_option("repo", "branch"): + repo_branch = ini.get("repo", "branch") -if ini.has_option("gpg", "comment"): - gpg_comment = ini.get("gpg", "comment") +usign_key = None +usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key" -if 
ini.has_option("gpg", "passfile"): - gpg_passfile = ini.get("gpg", "passfile") +if ini.has_option("usign", "key"): + usign_key = ini.get("usign", "key") + +if ini.has_option("usign", "comment"): + usign_comment = ini.get("usign", "comment") # find arches arches = [ ] archnames = [ ] -findarches = subprocess.Popen([home_dir+'/dumpinfo.pl', 'architectures'], - stdout = subprocess.PIPE, cwd = home_dir+'/source.git') +if not os.path.isdir(work_dir+'/source.git'): + subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git']) +else: + subprocess.call(["git", "pull"], cwd = work_dir+'/source.git') + +os.makedirs(work_dir+'/source.git/tmp', exist_ok=True) +findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'], + stdout = subprocess.PIPE, cwd = work_dir+'/source.git') while True: line = findarches.stdout.readline() if not line: break - at = line.strip().split() + at = line.decode().strip().split() arches.append(at) archnames.append(at[0]) @@ -134,70 +170,104 @@ while True: feeds = [] feedbranches = dict() -from buildbot.changes.gitpoller import GitPoller c['change_source'] = [] -with open(home_dir+'/source.git/feeds.conf.default', 'r') as f: +def parse_feed_entry(line): + parts = line.strip().split() + if parts[0].startswith("src-git"): + feeds.append(parts) + url = parts[2].strip().split(';') + branch = url[1] if len(url) > 1 else 'main' + feedbranches[url[0]] = branch + c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300)) + +make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'], + env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE) + +line = make.stdout.readline() +if line: + parse_feed_entry(str(line, 'utf-8')) + +with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f: for line in f: - parts = line.strip().split() - if parts[0] == "src-git": - feeds.append(parts) - url = parts[2].strip().split(';') - branch = url[1] if len(url) > 1 else 'master' - feedbranches[url[0]] = branch - c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300)) + parse_feed_entry(line) +if len(c['change_source']) == 0: + log.err("FATAL ERROR: no change_sources defined, aborting!") + sys.exit(-1) ####### SCHEDULERS # Configure the Schedulers, which decide how to react to incoming changes. 
In this # case, just kick off a 'basebuild' build -def branch_change_filter(change): - return change.branch == feedbranches[change.repository] - -from buildbot.schedulers.basic import SingleBranchScheduler -from buildbot.schedulers.forcesched import ForceScheduler -from buildbot.changes import filter c['schedulers'] = [] c['schedulers'].append(SingleBranchScheduler( - name="all", - change_filter=filter.ChangeFilter(filter_fn=branch_change_filter), - treeStableTimer=60, - builderNames=archnames)) + name = "all", + change_filter = filter.ChangeFilter( + filter_fn = lambda change: change.branch == feedbranches[change.repository] + ), + treeStableTimer = 60, + builderNames = archnames)) c['schedulers'].append(ForceScheduler( - name="force", - builderNames=archnames)) + name = "force", + buttonName = "Force builds", + label = "Force build details", + builderNames = [ "00_force_build" ], + + codebases = [ + util.CodebaseParameter( + "", + label = "Repository", + branch = util.FixedParameter(name = "branch", default = ""), + revision = util.FixedParameter(name = "revision", default = ""), + repository = util.FixedParameter(name = "repository", default = ""), + project = util.FixedParameter(name = "project", default = "") + ) + ], + + reason = util.StringParameter( + name = "reason", + label = "Reason", + default = "Trigger build", + required = True, + size = 80 + ), + + properties = [ + util.NestedParameter( + name="options", + label="Build Options", + layout="vertical", + fields=[ + util.ChoiceStringParameter( + name = "architecture", + label = "Build architecture", + default = "all", + choices = [ "all" ] + archnames + ) + ] + ) + ] +)) ####### BUILDERS # The 'builders' list defines the Builders, which tell Buildbot how to perform a build: -# what steps, and which slaves can execute them. Note that any particular build will -# only take place on one slave. - -from buildbot.process.factory import BuildFactory -from buildbot.steps.source import Git -from buildbot.steps.shell import ShellCommand -from buildbot.steps.shell import SetProperty -from buildbot.steps.transfer import FileUpload -from buildbot.steps.transfer import FileDownload -from buildbot.steps.master import MasterShellCommand -from buildbot.process.properties import WithProperties - +# what steps, and which workers can execute them. Note that any particular build will +# only take place on one worker. 
+@properties.renderer
 def GetDirectorySuffix(props):
-	if props.hasProperty("slavename") and re.match("^[^-]+-[0-9]+\.[0-9]+-[^-]+$", props["slavename"]):
-		return "-%s" % props["slavename"].split('-')[1]
-	else:
-		return ""
-
-def GetNumJobs(props):
-	if props.hasProperty("slavename") and props.hasProperty("nproc"):
-		return ((int(props["nproc"]) / (max_builds[props["slavename"]] + other_builds)) + 1)
-	else:
-		return 1
-
+	verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
+	if props.hasProperty("release_version"):
+		m = verpat.match(props["release_version"])
+		if m is not None:
+			return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
+	return ""
+
+@properties.renderer
 def GetCwd(props):
 	if props.hasProperty("builddir"):
 		return props["builddir"]
@@ -206,57 +276,165 @@ def GetCwd(props):
 	else:
 		return "/"
 
-
+def IsArchitectureSelected(target):
+	def CheckArchitectureProperty(step):
+		try:
+			options = step.getProperty("options")
+			if isinstance(options, dict):
+				selected_arch = options.get("architecture", "all")
+				if selected_arch != "all" and selected_arch != target:
+					return False
+		except KeyError:
+			pass
+
+		return True
+
+	return CheckArchitectureProperty
+
+def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
+	try:
+		seckey = base64.b64decode(seckey)
+	except Exception:
+		return None
+
+	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
+		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
+
+def IsSharedWorkdir(step):
+	return bool(step.getProperty("shared_wd"))
+
+@defer.inlineCallbacks
+def getNewestCompleteTime(bldr):
+	"""Returns the complete_at of the latest completed and not SKIPPED
+	build request for this builder, or None if there are no such build
+	requests. We need to filter out SKIPPED requests because we're
+	using collapseRequests=True which is unfortunately marking all
+	previous requests as complete when new buildset is created.
+
+	@returns: datetime instance or None, via Deferred
+	"""
+
+	bldrid = yield bldr.getBuilderId()
+	completed = yield bldr.master.data.get(
+		('builders', bldrid, 'buildrequests'),
+		[
+			resultspec.Filter('complete', 'eq', [True]),
+			resultspec.Filter('results', 'ne', [results.SKIPPED]),
+		],
+		order=['-complete_at'], limit=1)
+	if not completed:
+		return
+
+	complete_at = completed[0]['complete_at']
+
+	last_build = yield bldr.master.data.get(
+		('builds', ),
+		[
+			resultspec.Filter('builderid', 'eq', [bldrid]),
+		],
+		order=['-started_at'], limit=1)
+
+	if last_build and last_build[0]:
+		last_complete_at = last_build[0]['complete_at']
+		if last_complete_at and (last_complete_at > complete_at):
+			return last_complete_at
+
+	return complete_at
+
+@defer.inlineCallbacks
+def prioritizeBuilders(master, builders):
+	"""Returns sorted list of builders by their last timestamp of completed and
+	not skipped build.
+ + @returns: list of sorted builders + """ + + def is_building(bldr): + return bool(bldr.building) or bool(bldr.old_building) + + def bldr_info(bldr): + d = defer.maybeDeferred(getNewestCompleteTime, bldr) + d.addCallback(lambda complete_at: (complete_at, bldr)) + return d + + def bldr_sort(item): + (complete_at, bldr) = item + + if not complete_at: + date = datetime.min + complete_at = date.replace(tzinfo=tzutc()) + + if is_building(bldr): + date = datetime.max + complete_at = date.replace(tzinfo=tzutc()) + + return (complete_at, bldr.name) + + results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders]) + results.sort(key=bldr_sort) + + for r in results: + log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0])) + + return [r[1] for r in results] + +c['prioritizeBuilders'] = prioritizeBuilders c['builders'] = [] -dlLock = locks.SlaveLock("slave_dl") +dlLock = locks.WorkerLock("worker_dl") + +workerNames = [ ] -slaveNames = [ ] +for worker in c['workers']: + workerNames.append(worker.workername) -for slave in c['slaves']: - slaveNames.append(slave.slavename) +force_factory = BuildFactory() + +c['builders'].append(BuilderConfig( + name = "00_force_build", + workernames = workerNames, + factory = force_factory)) for arch in arches: ts = arch[1].split('/') factory = BuildFactory() + # setup shared work directory if required + factory.addStep(ShellCommand( + name = "sharedwd", + description = "Setting up shared work directory", + command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")', + workdir = ".", + haltOnFailure = True, + doStepIf = IsSharedWorkdir)) + # find number of cores - factory.addStep(SetProperty( + factory.addStep(SetPropertyFromCommand( name = "nproc", property = "nproc", description = "Finding number of CPUs", command = ["nproc"])) # prepare workspace - factory.addStep(FileDownload(mastersrc="cleanup.sh", slavedest="cleanup.sh", mode=0755)) + factory.addStep(FileDownload( + mastersrc = scripts_dir + '/cleanup.sh', + workerdest = "../cleanup.sh", + mode = 0o755)) if not persistent: factory.addStep(ShellCommand( name = "cleanold", description = "Cleaning previous builds", - command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "full"], + command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"], + workdir = ".", haltOnFailure = True, timeout = 2400)) factory.addStep(ShellCommand( name = "cleanup", description = "Cleaning work area", - command = ["./cleanup.sh", buildbot_url, WithProperties("%(slavename)s"), WithProperties("%(buildername)s"), "single"], - haltOnFailure = True, - timeout = 2400)) - - # expire tree if needed - elif tree_expire > 0: - factory.addStep(FileDownload( - mastersrc = home_dir+"/expire.sh", - slavedest = "../expire.sh", - mode = 0755)) - - factory.addStep(ShellCommand( - name = "expire", - description = "Checking for build tree expiry", - command = ["./expire.sh", str(tree_expire)], + command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"], workdir = ".", haltOnFailure = True, timeout = 2400)) @@ -270,7 +448,7 @@ for arch in arches: factory.addStep(ShellCommand( name = "downloadsdk", description = "Downloading SDK archive", - command = ["rsync", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"], + command = ["rsync"] + rsync_defopts + ["-a", 
"%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"], env={'RSYNC_PASSWORD': rsync_sdk_key}, haltOnFailure = True, logEnviron = False)) @@ -278,16 +456,59 @@ for arch in arches: factory.addStep(ShellCommand( name = "unpacksdk", description = "Unpacking SDK archive", - command = ["tar", "--keep-newer-files", "--strip-components=1", "-C", "sdk/", "-vxf", "sdk.archive"], + command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive", + haltOnFailure = True)) + + factory.addStep(ShellCommand( + name = "updatesdk", + description = "Updating SDK", + command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update", + haltOnFailure = True)) + + factory.addStep(ShellCommand( + name = "cleancmdlinks", + description = "Sanitizing host command symlinks", + command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;", haltOnFailure = True)) - factory.addStep(FileDownload(mastersrc=home_dir+'/key-build', slavedest="sdk/key-build", mode=0600)) - factory.addStep(FileDownload(mastersrc=home_dir+'/key-build.pub', slavedest="sdk/key-build.pub", mode=0600)) + factory.addStep(StringDownload( + name = "writeversionmk", + s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n', + workerdest = "sdk/getversion.mk", + mode = 0o755)) + + factory.addStep(SetPropertyFromCommand( + name = "getversion", + property = "release_version", + description = "Finding SDK release version", + workdir = "build/sdk", + command = ["make", "-f", "getversion.mk"])) + + # install build key + if usign_key is not None: + factory.addStep(StringDownload( + name = "dlkeybuildpub", + s = UsignSec2Pub(usign_key, usign_comment), + workerdest = "sdk/key-build.pub", + mode = 0o600)) + + factory.addStep(StringDownload( + name = "dlkeybuild", + s = "# fake private key", + workerdest = "sdk/key-build", + mode = 0o600)) + + factory.addStep(StringDownload( + name = "dlkeybuilducert", + s = "# fake certificate", + workerdest = "sdk/key-build.ucert", + mode = 0o600)) factory.addStep(ShellCommand( name = "mkdldir", description = "Preparing download directory", - command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"])) + command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"], + haltOnFailure = True)) factory.addStep(ShellCommand( name = "mkconf", @@ -296,34 +517,49 @@ for arch in arches: command = ["sh", "-c", "rm -f .config && make defconfig"])) factory.addStep(FileDownload( - mastersrc = home_dir+'/ccache.sh', - slavedest = 'sdk/ccache.sh', - mode = 0755)) + mastersrc = scripts_dir + '/ccache.sh', + workerdest = 'sdk/ccache.sh', + mode = 0o755)) factory.addStep(ShellCommand( name = "prepccache", description = "Preparing ccache", workdir = "build/sdk", - command = ["./ccache.sh"])) + command = ["./ccache.sh"], + haltOnFailure = True)) factory.addStep(ShellCommand( name = "updatefeeds", description = "Updating feeds", workdir = "build/sdk", - command = ["./scripts/feeds", "update"])) + command = ["./scripts/feeds", "update", "-f"], + haltOnFailure = True)) factory.addStep(ShellCommand( name = "installfeeds", description = "Installing feeds", workdir = "build/sdk", - command = ["./scripts/feeds", "install", "-a"])) + command = ["./scripts/feeds", "install", "-a"], + haltOnFailure = True)) + + factory.addStep(ShellCommand( + name = "logclear", + 
description = "Clearing failure logs", + workdir = "build/sdk", + command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"], + haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = True, + )) factory.addStep(ShellCommand( name = "compile", description = "Building packages", workdir = "build/sdk", - command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_SIGNED_PACKAGES=y", "CONFIG_AUTOREMOVE=y"], - env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)})) + timeout = 3600, + command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="], + env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)}, + haltOnFailure = True)) factory.addStep(ShellCommand( name = "mkfeedsconf", @@ -331,11 +567,23 @@ for arch in arches: workdir = "build/sdk", command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0]))) - if gpg_keyid is not None: + factory.addStep(ShellCommand( + name = "checksums", + description = "Calculating checksums", + descriptionDone="Checksums calculated", + workdir = "build/sdk", + command = "cd bin/packages/%s; " %(arch[0]) + + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | " + + "sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | " + + r"sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums", + haltOnFailure = True + )) + + if ini.has_option("gpg", "key") or usign_key is not None: factory.addStep(MasterShellCommand( name = "signprepare", description = "Preparing temporary signing directory", - command = ["mkdir", "-p", "%s/signing" %(home_dir)], + command = ["mkdir", "-p", "%s/signing" %(work_dir)], haltOnFailure = True )) @@ -348,22 +596,22 @@ for arch in arches: )) factory.addStep(FileUpload( - slavesrc = "sdk/sign.tar.gz", - masterdest = "%s/signing/%s.tar.gz" %(home_dir, arch[0]), + workersrc = "sdk/sign.tar.gz", + masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]), haltOnFailure = True )) factory.addStep(MasterShellCommand( name = "signfiles", description = "Signing files", - command = ["%s/signall.sh" %(home_dir), "%s/signing/%s.tar.gz" %(home_dir, arch[0]), gpg_keyid, gpg_comment], - env = {'GNUPGHOME': gpg_home, 'PASSFILE': gpg_passfile}, + command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])], + env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") }, haltOnFailure = True )) factory.addStep(FileDownload( - mastersrc = "%s/signing/%s.tar.gz" %(home_dir, arch[0]), - slavedest = "sdk/sign.tar.gz", + mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]), + workerdest = "sdk/sign.tar.gz", haltOnFailure = True )) @@ -375,11 +623,45 @@ for arch in arches: haltOnFailure = True )) + # download remote sha256sums to 'target-sha256sums' + factory.addStep(ShellCommand( + name = "target-sha256sums", + description = "Fetching remote sha256sums for arch", + command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"], + env={'RSYNC_PASSWORD': rsync_bin_key}, + logEnviron = False, + haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = False, + )) + + factory.addStep(FileDownload( + name="dlrsync.sh", + mastersrc = scripts_dir + "/rsync.sh", + workerdest = "../rsync.sh", + mode = 0o755 + )) + + factory.addStep(FileDownload( + name = "dlsha2rsyncpl", + mastersrc 
= scripts_dir + "/sha2rsync.pl", + workerdest = "../sha2rsync.pl", + mode = 0o755, + )) + + factory.addStep(ShellCommand( + name = "buildlist", + description = "Building list of files to upload", + workdir = "build/sdk", + command = ["../../sha2rsync.pl", "../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"], + haltOnFailure = True, + )) + factory.addStep(ShellCommand( name = "uploadprepare", description = "Preparing package directory", workdir = "build/sdk", - command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)], + command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)], env={'RSYNC_PASSWORD': rsync_bin_key}, haltOnFailure = True, logEnviron = False @@ -389,7 +671,17 @@ for arch in arches: name = "packageupload", description = "Uploading package files", workdir = "build/sdk", - command = ["rsync", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)], + command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])], + env={'RSYNC_PASSWORD': rsync_bin_key}, + haltOnFailure = True, + logEnviron = False + )) + + factory.addStep(ShellCommand( + name = "packageprune", + description = "Pruning package files", + workdir = "build/sdk", + command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])], env={'RSYNC_PASSWORD': rsync_bin_key}, haltOnFailure = True, logEnviron = False @@ -399,7 +691,7 @@ for arch in arches: name = "logprepare", description = "Preparing log directory", workdir = "build/sdk", - command = ["rsync", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", "%s/faillogs/" %(rsync_bin_url)], + command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)], env={'RSYNC_PASSWORD': rsync_bin_key}, haltOnFailure = True, logEnviron = False @@ -410,42 +702,98 @@ for arch in arches: description = "Finding failure logs", workdir = "build/sdk/logs/package/feeds", command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"], - haltOnFailure = False + haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = True, )) factory.addStep(ShellCommand( name = "logcollect", description = "Collecting failure logs", workdir = "build/sdk", - command = ["rsync", "-av", "--files-from=logs.txt", 
"logs/package/feeds/", "faillogs/"], - haltOnFailure = False + command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"], + haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = True, )) factory.addStep(ShellCommand( name = "logupload", description = "Uploading failure logs", workdir = "build/sdk", - command = ["rsync", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", "%s/faillogs/%s/" %(rsync_bin_url, arch[0])], + command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])], env={'RSYNC_PASSWORD': rsync_bin_key}, haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = True, logEnviron = False )) if rsync_src_url is not None: + factory.addStep(ShellCommand( + name = "sourcelist", + description = "Finding source archives to upload", + workdir = "build/sdk", + command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist", + haltOnFailure = True + )) + factory.addStep(ShellCommand( name = "sourceupload", description = "Uploading source archives", workdir = "build/sdk", - command = ["rsync", "--progress", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)], + command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates", + Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)], env={'RSYNC_PASSWORD': rsync_src_key}, haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = True, logEnviron = False )) - from buildbot.config import BuilderConfig + factory.addStep(ShellCommand( + name = "df", + description = "Reporting disk usage", + command=["df", "-h", "."], + env={'LC_ALL': 'C'}, + haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = False, + alwaysRun = True + )) - c['builders'].append(BuilderConfig(name=arch[0], slavenames=slaveNames, factory=factory)) + factory.addStep(ShellCommand( + name = "du", + description = "Reporting estimated file space usage", + command=["du", "-sh", "."], + env={'LC_ALL': 'C'}, + haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = False, + alwaysRun = True + )) + factory.addStep(ShellCommand( + name = "ccachestat", + description = "Reporting ccache stats", + command=["ccache", "-s"], + want_stderr = False, + haltOnFailure = False, + flunkOnFailure = False, + warnOnFailure = False, + alwaysRun = True, + )) + + c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory)) + + c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ])) + force_factory.addStep(steps.Trigger( + name = "trigger_%s" % arch[0], + description = "Triggering %s build" % arch[0], + schedulerNames = [ "trigger_%s" % arch[0] ], + set_properties = { "reason": Property("reason") }, + doStepIf = IsArchitectureSelected(arch[0]) + )) ####### STATUS arches @@ -453,28 +801,24 @@ for arch in arches: # pushed to these arches. buildbot/status/*.py has a variety to choose from, # including web pages, email senders, and IRC bots. 
-c['status'] = [] - -from buildbot.status import html -from buildbot.status.web import authz, auth - -if ini.has_option("status", "bind"): - if ini.has_option("status", "user") and ini.has_option("status", "password"): - authz_cfg=authz.Authz( - # change any of these to True to enable; see the manual for more - # options - auth=auth.BasicAuth([(ini.get("status", "user"), ini.get("status", "password"))]), - gracefulShutdown = 'auth', - forceBuild = 'auth', # use this to test your slave once it is set up - forceAllBuilds = 'auth', - pingBuilder = False, - stopBuild = 'auth', - stopAllBuilds = 'auth', - cancelPendingBuild = 'auth', +if ini.has_option("phase2", "status_bind"): + c['www'] = { + 'port': ini.get("phase2", "status_bind"), + 'plugins': { + 'waterfall_view': True, + 'console_view': True, + 'grid_view': True + } + } + + if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"): + c['www']['auth'] = util.UserPasswordAuth([ + (ini.get("phase2", "status_user"), ini.get("phase2", "status_password")) + ]) + c['www']['authz'] = util.Authz( + allowRules=[ util.AnyControlEndpointMatcher(role="admins") ], + roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ] ) - c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"), authz=authz_cfg)) - else: - c['status'].append(html.WebStatus(http_port=ini.get("status", "bind"))) ####### PROJECT IDENTITY @@ -500,3 +844,5 @@ c['db'] = { # this at its default for all but the largest installations. 'db_url' : "sqlite:///state.sqlite", } + +c['buildbotNetUsageData'] = None
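For reference, a minimal sketch of the config.ini layout this master.cfg reads. The section and option names below are taken directly from the ini.get()/ini.has_option() calls in the diff above; every value (URLs, rsync modules, passwords, worker names) and the script filename are made-up placeholders, not known-good production settings.

# sketch_config.py - writes an illustrative config.ini for the phase2 master
# (placeholder values only; an optional [gpg] "key" option is also honoured
# by the signing steps, alongside [usign])
import configparser

cfg = configparser.ConfigParser()
cfg["general"] = {
	"workdir": "/data/phase2",                      # work_dir; falls back to "." when empty
}
cfg["phase2"] = {
	"buildbot_url": "http://buildbot.example.org/", # placeholder URL
	"port": "9990",                                 # worker_port
	"persistent": "false",
	"status_bind": "8011",                          # enables c['www']
	"status_user": "admin",
	"status_password": "secret",
}
cfg["repo"] = {
	"url": "https://git.openwrt.org/openwrt/openwrt.git",
	"branch": "main",
}
cfg["rsync"] = {
	"binary_url": "rsync://upload.example.org/packages",
	"binary_password": "secret",
	"sdk_url": "rsync://downloads.example.org/sdk",
	"sdk_password": "secret",
	"sdk_pattern": "openwrt-sdk-*.tar.*",
	"source_url": "rsync://upload.example.org/sources",
	"source_password": "secret",
}
cfg["usign"] = {
	"key": "...base64 usign secret key...",        # consumed by UsignSec2Pub()
}
cfg["worker 1"] = {
	"phase": "2",                                   # only phase = 2 workers are picked up
	"name": "worker-01",
	"password": "secret",
	"shared_wd": "yes",
}

with open("config.ini", "w") as f:
	cfg.write(f)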
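The byte slicing inside UsignSec2Pub() follows the usign/signify key layout: a decoded secret key is a 2-byte algorithm tag "Ed", a 2-byte KDF algorithm, 4 bytes of KDF rounds, a 16-byte salt, an 8-byte checksum, an 8-byte key number, then the 64-byte ed25519 secret key whose last 32 bytes are the public half; a public key blob is just algorithm + key number + public half. A self-contained check of that arithmetic with a synthetic, structurally valid (but unusable) key; the filename is illustrative:

# usign_layout.py - demonstrates the slicing used by UsignSec2Pub()
import base64, os

sec = (b"Ed"               # signature algorithm
	+ b"BK"                # KDF algorithm
	+ b"\x00" * 4          # KDF rounds
	+ os.urandom(16)       # salt
	+ b"\x00" * 8          # checksum
	+ os.urandom(8)        # key number
	+ os.urandom(64))      # ed25519 secret key (last 32 bytes = public key)
assert len(sec) == 104

pub = sec[0:2] + sec[32:40] + sec[72:]   # the same slices as UsignSec2Pub()
assert len(pub) == 42                    # "Ed" + key number + 32-byte pubkey
print(base64.b64encode(pub).decode())    # .decode() yields str, not "b'...'"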
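GetDirectorySuffix() above is what routes uploads either to the snapshot tree (empty suffix) or to a versioned tree such as packages-23.05/, via the Interpolate("...packages%(kw:suffix)s...") rsync URLs. A quick standalone exercise of the same regex, with illustrative version strings:

# suffix_demo.py - exercises the release_version pattern from GetDirectorySuffix()
import re

verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')

for v in ("23.05.3", "23.05.0-rc4", "24.10-SNAPSHOT", "SNAPSHOT", "trunk"):
	m = verpat.match(v)
	suffix = "-%02d.%02d" % (int(m.group(1)), int(m.group(2))) if m else ""
	print("%-15s -> packages%s/" % (v, suffix))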