14378259c0c2cbaf2ea1e606345bf0bf9970e320
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import sys
7 import base64
8 import subprocess
9 import configparser
10
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
13
14 from twisted.internet import defer
15 from twisted.python import log
16
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
39
40
41 if not os.path.exists("twistd.pid"):
42 with open("twistd.pid", "w") as pidfile:
43 pidfile.write("{}".format(os.getpid()))
44
45 ini = configparser.ConfigParser()
46 ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
47
48 buildbot_url = ini.get("phase2", "buildbot_url")
49
50 # This is a sample buildmaster config file. It must be installed as
51 # 'master.cfg' in your buildmaster's base directory.
52
53 # This is the dictionary that the buildmaster pays attention to. We also use
54 # a shorter alias to save typing.
55 c = BuildmasterConfig = {}
56
57 ####### BUILDWORKERS
58
59 # The 'workers' list defines the set of recognized buildworkers. Each element is
60 # a Worker object, specifying a unique worker name and password. The same
61 # worker name and password must be configured on the worker.
62
63 worker_port = 9990
64 persistent = False
65 tree_expire = 0
66 git_ssh = False
67 git_ssh_key = None
68
69 if ini.has_option("phase2", "port"):
70 worker_port = ini.get("phase2", "port")
71
72 if ini.has_option("phase2", "persistent"):
73 persistent = ini.getboolean("phase2", "persistent")
74
75 if ini.has_option("phase2", "expire"):
76 tree_expire = ini.getint("phase2", "expire")
77
78 if ini.has_option("general", "git_ssh"):
79 git_ssh = ini.getboolean("general", "git_ssh")
80
81 if ini.has_option("general", "git_ssh_key"):
82 git_ssh_key = ini.get("general", "git_ssh_key")
83 else:
84 git_ssh = False
85
86 c['workers'] = []
87 max_builds = dict()
88
89 for section in ini.sections():
90 if section.startswith("worker "):
91 if ini.has_option(section, "name") and ini.has_option(section, "password") and \
92 ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
93 name = ini.get(section, "name")
94 password = ini.get(section, "password")
95 sl_props = { 'shared_wd': False }
96 max_builds[name] = 1
97
98 if ini.has_option(section, "builds"):
99 max_builds[name] = ini.getint(section, "builds")
100
101 if max_builds[name] == 1:
102 sl_props['shared_wd'] = True
103
104 if ini.has_option(section, "shared_wd"):
105 sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
106 if sl_props['shared_wd'] and (max_builds != 1):
107 raise ValueError('max_builds must be 1 with shared workdir!')
108
109 c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
110
111 # 'workerPortnum' defines the TCP port to listen on for connections from workers.
112 # This must match the value configured into the buildworkers (with their
113 # --master option)
114 c['protocols'] = {'pb': {'port': worker_port}}
115
116 # coalesce builds
117 c['collapseRequests'] = True
118
119 # Reduce amount of backlog data
120 c['configurators'] = [util.JanitorConfigurator(
121 logHorizon=timedelta(days=3),
122 hour=6,
123 )]
124
125 ####### CHANGESOURCES
126
127 work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
128 scripts_dir = os.path.abspath("../scripts")
129
130 rsync_bin_url = ini.get("rsync", "binary_url")
131 rsync_bin_key = ini.get("rsync", "binary_password")
132
133 rsync_src_url = None
134 rsync_src_key = None
135
136 if ini.has_option("rsync", "source_url"):
137 rsync_src_url = ini.get("rsync", "source_url")
138 rsync_src_key = ini.get("rsync", "source_password")
139
140 rsync_sdk_url = None
141 rsync_sdk_key = None
142 rsync_sdk_pat = "openwrt-sdk-*.tar.xz"
143
144 if ini.has_option("rsync", "sdk_url"):
145 rsync_sdk_url = ini.get("rsync", "sdk_url")
146
147 if ini.has_option("rsync", "sdk_password"):
148 rsync_sdk_key = ini.get("rsync", "sdk_password")
149
150 if ini.has_option("rsync", "sdk_pattern"):
151 rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
152
153 repo_url = ini.get("repo", "url")
154 repo_branch = "master"
155
156 if ini.has_option("repo", "branch"):
157 repo_branch = ini.get("repo", "branch")
158
159 usign_key = None
160 usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
161
162 if ini.has_option("usign", "key"):
163 usign_key = ini.get("usign", "key")
164
165 if ini.has_option("usign", "comment"):
166 usign_comment = ini.get("usign", "comment")
167
168
169 # find arches
170 arches = [ ]
171 archnames = [ ]
172
173 if not os.path.isdir(work_dir+'/source.git'):
174 subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
175 else:
176 subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
177
178 os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
179 findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
180 stdout = subprocess.PIPE, cwd = work_dir+'/source.git')
181
182 while True:
183 line = findarches.stdout.readline()
184 if not line:
185 break
186 at = line.decode().strip().split()
187 arches.append(at)
188 archnames.append(at[0])
189
190
191 # find feeds
192 feeds = []
193 feedbranches = dict()
194
195 c['change_source'] = []
196
197 def parse_feed_entry(line):
198 parts = line.strip().split()
199 if parts[0].startswith("src-git"):
200 feeds.append(parts)
201 url = parts[2].strip().split(';')
202 branch = url[1] if len(url) > 1 else 'master'
203 feedbranches[url[0]] = branch
204 c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
205
206 make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
207 env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)
208
209 line = make.stdout.readline()
210 if line:
211 parse_feed_entry(str(line, 'utf-8'))
212
213 with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
214 for line in f:
215 parse_feed_entry(line)
216
217 if len(c['change_source']) == 0:
218 log.err("FATAL ERROR: no change_sources defined, aborting!")
219 sys.exit(-1)
220
221 ####### SCHEDULERS
222
223 # Configure the Schedulers, which decide how to react to incoming changes. In this
224 # case, just kick off a 'basebuild' build
225
226 c['schedulers'] = []
227 c['schedulers'].append(SingleBranchScheduler(
228 name = "all",
229 change_filter = filter.ChangeFilter(
230 filter_fn = lambda change: change.branch == feedbranches[change.repository]
231 ),
232 treeStableTimer = 60,
233 builderNames = archnames))
234
235 c['schedulers'].append(ForceScheduler(
236 name = "force",
237 buttonName = "Force builds",
238 label = "Force build details",
239 builderNames = [ "00_force_build" ],
240
241 codebases = [
242 util.CodebaseParameter(
243 "",
244 label = "Repository",
245 branch = util.FixedParameter(name = "branch", default = ""),
246 revision = util.FixedParameter(name = "revision", default = ""),
247 repository = util.FixedParameter(name = "repository", default = ""),
248 project = util.FixedParameter(name = "project", default = "")
249 )
250 ],
251
252 reason = util.StringParameter(
253 name = "reason",
254 label = "Reason",
255 default = "Trigger build",
256 required = True,
257 size = 80
258 ),
259
260 properties = [
261 util.NestedParameter(
262 name="options",
263 label="Build Options",
264 layout="vertical",
265 fields=[
266 util.ChoiceStringParameter(
267 name = "architecture",
268 label = "Build architecture",
269 default = "all",
270 choices = [ "all" ] + archnames
271 )
272 ]
273 )
274 ]
275 ))
276
277 ####### BUILDERS
278
279 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
280 # what steps, and which workers can execute them. Note that any particular build will
281 # only take place on one worker.
282
283 @properties.renderer
284 def GetDirectorySuffix(props):
285 verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
286 if props.hasProperty("release_version"):
287 m = verpat.match(props["release_version"])
288 if m is not None:
289 return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
290 return ""
291
292 @properties.renderer
293 def GetNumJobs(props):
294 if props.hasProperty("workername") and props.hasProperty("nproc"):
295 return str(int(props["nproc"]) / max_builds[props["workername"]])
296 else:
297 return "1"
298
299 @properties.renderer
300 def GetCwd(props):
301 if props.hasProperty("builddir"):
302 return props["builddir"]
303 elif props.hasProperty("workdir"):
304 return props["workdir"]
305 else:
306 return "/"
307
308 def IsArchitectureSelected(target):
309 def CheckArchitectureProperty(step):
310 try:
311 options = step.getProperty("options")
312 if type(options) is dict:
313 selected_arch = options.get("architecture", "all")
314 if selected_arch != "all" and selected_arch != target:
315 return False
316 except KeyError:
317 pass
318
319 return True
320
321 return CheckArchitectureProperty
322
323 def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
324 try:
325 seckey = base64.b64decode(seckey)
326 except Exception:
327 return None
328
329 return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
330 base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]))
331
332 def IsSharedWorkdir(step):
333 return bool(step.getProperty("shared_wd"))
334
335 @defer.inlineCallbacks
336 def getNewestCompleteTime(bldr):
337 """Returns the complete_at of the latest completed and not SKIPPED
338 build request for this builder, or None if there are no such build
339 requests. We need to filter out SKIPPED requests because we're
340 using collapseRequests=True which is unfortunately marking all
341 previous requests as complete when new buildset is created.
342
343 @returns: datetime instance or None, via Deferred
344 """
345
346 bldrid = yield bldr.getBuilderId()
347 completed = yield bldr.master.data.get(
348 ('builders', bldrid, 'buildrequests'),
349 [
350 resultspec.Filter('complete', 'eq', [True]),
351 resultspec.Filter('results', 'ne', [results.SKIPPED]),
352 ],
353 order=['-complete_at'], limit=1)
354 if not completed:
355 return
356
357 complete_at = completed[0]['complete_at']
358
359 last_build = yield bldr.master.data.get(
360 ('builds', ),
361 [
362 resultspec.Filter('builderid', 'eq', [bldrid]),
363 ],
364 order=['-started_at'], limit=1)
365
366 if last_build and last_build[0]:
367 last_complete_at = last_build[0]['complete_at']
368 if last_complete_at and (last_complete_at > complete_at):
369 return last_complete_at
370
371 return complete_at
372
373 @defer.inlineCallbacks
374 def prioritizeBuilders(master, builders):
375 """Returns sorted list of builders by their last timestamp of completed and
376 not skipped build.
377
378 @returns: list of sorted builders
379 """
380
381 def is_building(bldr):
382 return bool(bldr.building) or bool(bldr.old_building)
383
384 def bldr_info(bldr):
385 d = defer.maybeDeferred(getNewestCompleteTime, bldr)
386 d.addCallback(lambda complete_at: (complete_at, bldr))
387 return d
388
389 def bldr_sort(item):
390 (complete_at, bldr) = item
391
392 if not complete_at:
393 date = datetime.min
394 complete_at = date.replace(tzinfo=tzutc())
395
396 if is_building(bldr):
397 date = datetime.max
398 complete_at = date.replace(tzinfo=tzutc())
399
400 return (complete_at, bldr.name)
401
402 results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
403 results.sort(key=bldr_sort)
404
405 for r in results:
406 log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))
407
408 return [r[1] for r in results]
409
410 c['prioritizeBuilders'] = prioritizeBuilders
411 c['builders'] = []
412
413 dlLock = locks.WorkerLock("worker_dl")
414
415 workerNames = [ ]
416
417 for worker in c['workers']:
418 workerNames.append(worker.workername)
419
420 force_factory = BuildFactory()
421
422 c['builders'].append(BuilderConfig(
423 name = "00_force_build",
424 workernames = workerNames,
425 factory = force_factory))
426
427 for arch in arches:
428 ts = arch[1].split('/')
429
430 factory = BuildFactory()
431
432 # setup shared work directory if required
433 factory.addStep(ShellCommand(
434 name = "sharedwd",
435 description = "Setting up shared work directory",
436 command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
437 workdir = ".",
438 haltOnFailure = True,
439 doStepIf = IsSharedWorkdir))
440
441 # find number of cores
442 factory.addStep(SetPropertyFromCommand(
443 name = "nproc",
444 property = "nproc",
445 description = "Finding number of CPUs",
446 command = ["nproc"]))
447
448 # prepare workspace
449 factory.addStep(FileDownload(
450 mastersrc = scripts_dir + '/cleanup.sh',
451 workerdest = "../cleanup.sh",
452 mode = 0o755))
453
454 if not persistent:
455 factory.addStep(ShellCommand(
456 name = "cleanold",
457 description = "Cleaning previous builds",
458 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
459 workdir = ".",
460 haltOnFailure = True,
461 timeout = 2400))
462
463 factory.addStep(ShellCommand(
464 name = "cleanup",
465 description = "Cleaning work area",
466 command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
467 workdir = ".",
468 haltOnFailure = True,
469 timeout = 2400))
470
471 # expire tree if needed
472 elif tree_expire > 0:
473 factory.addStep(FileDownload(
474 mastersrc = scripts_dir + '/expire.sh',
475 workerdest = "../expire.sh",
476 mode = 0o755))
477
478 factory.addStep(ShellCommand(
479 name = "expire",
480 description = "Checking for build tree expiry",
481 command = ["./expire.sh", str(tree_expire)],
482 workdir = ".",
483 haltOnFailure = True,
484 timeout = 2400))
485
486 factory.addStep(ShellCommand(
487 name = "mksdkdir",
488 description = "Preparing SDK directory",
489 command = ["mkdir", "-p", "sdk"],
490 haltOnFailure = True))
491
492 factory.addStep(ShellCommand(
493 name = "downloadsdk",
494 description = "Downloading SDK archive",
495 command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
496 env={'RSYNC_PASSWORD': rsync_sdk_key},
497 haltOnFailure = True,
498 logEnviron = False))
499
500 factory.addStep(ShellCommand(
501 name = "unpacksdk",
502 description = "Unpacking SDK archive",
503 command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
504 haltOnFailure = True))
505
506 factory.addStep(ShellCommand(
507 name = "updatesdk",
508 description = "Updating SDK",
509 command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
510 haltOnFailure = True))
511
512 factory.addStep(ShellCommand(
513 name = "cleancmdlinks",
514 description = "Sanitizing host command symlinks",
515 command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
516 haltOnFailure = True))
517
518 factory.addStep(StringDownload(
519 name = "writeversionmk",
520 s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
521 workerdest = "sdk/getversion.mk",
522 mode = 0o755))
523
524 factory.addStep(SetPropertyFromCommand(
525 name = "getversion",
526 property = "release_version",
527 description = "Finding SDK release version",
528 workdir = "build/sdk",
529 command = ["make", "-f", "getversion.mk"]))
530
531 # install build key
532 if usign_key is not None:
533 factory.addStep(StringDownload(
534 name = "dlkeybuildpub",
535 s = UsignSec2Pub(usign_key, usign_comment),
536 workerdest = "sdk/key-build.pub",
537 mode = 0o600))
538
539 factory.addStep(StringDownload(
540 name = "dlkeybuild",
541 s = "# fake private key",
542 workerdest = "sdk/key-build",
543 mode = 0o600))
544
545 factory.addStep(StringDownload(
546 name = "dlkeybuilducert",
547 s = "# fake certificate",
548 workerdest = "sdk/key-build.ucert",
549 mode = 0o600))
550
551 factory.addStep(ShellCommand(
552 name = "mkdldir",
553 description = "Preparing download directory",
554 command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
555 haltOnFailure = True))
556
557 factory.addStep(ShellCommand(
558 name = "mkconf",
559 description = "Preparing SDK configuration",
560 workdir = "build/sdk",
561 command = ["sh", "-c", "rm -f .config && make defconfig"]))
562
563 factory.addStep(FileDownload(
564 mastersrc = scripts_dir + '/ccache.sh',
565 workerdest = 'sdk/ccache.sh',
566 mode = 0o755))
567
568 factory.addStep(ShellCommand(
569 name = "prepccache",
570 description = "Preparing ccache",
571 workdir = "build/sdk",
572 command = ["./ccache.sh"],
573 haltOnFailure = True))
574
575 if git_ssh:
576 factory.addStep(StringDownload(
577 name = "dlgitclonekey",
578 s = git_ssh_key,
579 workerdest = "../git-clone.key",
580 mode = 0o600))
581
582 factory.addStep(ShellCommand(
583 name = "patchfeedsconf",
584 description = "Patching feeds.conf",
585 workdir = "build/sdk",
586 command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
587 haltOnFailure = True))
588
589 factory.addStep(ShellCommand(
590 name = "updatefeeds",
591 description = "Updating feeds",
592 workdir = "build/sdk",
593 command = ["./scripts/feeds", "update", "-f"],
594 env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
595 haltOnFailure = True))
596
597 if git_ssh:
598 factory.addStep(ShellCommand(
599 name = "rmfeedsconf",
600 description = "Removing feeds.conf",
601 workdir = "build/sdk",
602 command=["rm", "feeds.conf"],
603 haltOnFailure = True))
604
605 factory.addStep(ShellCommand(
606 name = "installfeeds",
607 description = "Installing feeds",
608 workdir = "build/sdk",
609 command = ["./scripts/feeds", "install", "-a"],
610 haltOnFailure = True))
611
612 factory.addStep(ShellCommand(
613 name = "logclear",
614 description = "Clearing failure logs",
615 workdir = "build/sdk",
616 command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
617 haltOnFailure = False,
618 flunkOnFailure = False,
619 warnOnFailure = True,
620 ))
621
622 factory.addStep(ShellCommand(
623 name = "compile",
624 description = "Building packages",
625 workdir = "build/sdk",
626 timeout = 3600,
627 command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
628 env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
629 haltOnFailure = True))
630
631 factory.addStep(ShellCommand(
632 name = "mkfeedsconf",
633 description = "Generating pinned feeds.conf",
634 workdir = "build/sdk",
635 command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))
636
637 if ini.has_option("gpg", "key") or usign_key is not None:
638 factory.addStep(MasterShellCommand(
639 name = "signprepare",
640 description = "Preparing temporary signing directory",
641 command = ["mkdir", "-p", "%s/signing" %(work_dir)],
642 haltOnFailure = True
643 ))
644
645 factory.addStep(ShellCommand(
646 name = "signpack",
647 description = "Packing files to sign",
648 workdir = "build/sdk",
649 command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
650 haltOnFailure = True
651 ))
652
653 factory.addStep(FileUpload(
654 workersrc = "sdk/sign.tar.gz",
655 masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
656 haltOnFailure = True
657 ))
658
659 factory.addStep(MasterShellCommand(
660 name = "signfiles",
661 description = "Signing files",
662 command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
663 env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
664 haltOnFailure = True
665 ))
666
667 factory.addStep(FileDownload(
668 mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
669 workerdest = "sdk/sign.tar.gz",
670 haltOnFailure = True
671 ))
672
673 factory.addStep(ShellCommand(
674 name = "signunpack",
675 description = "Unpacking signed files",
676 workdir = "build/sdk",
677 command = ["tar", "-xzf", "sign.tar.gz"],
678 haltOnFailure = True
679 ))
680
681 factory.addStep(ShellCommand(
682 name = "uploadprepare",
683 description = "Preparing package directory",
684 workdir = "build/sdk",
685 command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
686 env={'RSYNC_PASSWORD': rsync_bin_key},
687 haltOnFailure = True,
688 logEnviron = False
689 ))
690
691 factory.addStep(ShellCommand(
692 name = "packageupload",
693 description = "Uploading package files",
694 workdir = "build/sdk",
695 command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
696 env={'RSYNC_PASSWORD': rsync_bin_key},
697 haltOnFailure = True,
698 logEnviron = False
699 ))
700
701 factory.addStep(ShellCommand(
702 name = "logprepare",
703 description = "Preparing log directory",
704 workdir = "build/sdk",
705 command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
706 env={'RSYNC_PASSWORD': rsync_bin_key},
707 haltOnFailure = True,
708 logEnviron = False
709 ))
710
711 factory.addStep(ShellCommand(
712 name = "logfind",
713 description = "Finding failure logs",
714 workdir = "build/sdk/logs/package/feeds",
715 command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
716 haltOnFailure = False,
717 flunkOnFailure = False,
718 warnOnFailure = True,
719 ))
720
721 factory.addStep(ShellCommand(
722 name = "logcollect",
723 description = "Collecting failure logs",
724 workdir = "build/sdk",
725 command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
726 haltOnFailure = False,
727 flunkOnFailure = False,
728 warnOnFailure = True,
729 ))
730
731 factory.addStep(ShellCommand(
732 name = "logupload",
733 description = "Uploading failure logs",
734 workdir = "build/sdk",
735 command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
736 env={'RSYNC_PASSWORD': rsync_bin_key},
737 haltOnFailure = False,
738 flunkOnFailure = False,
739 warnOnFailure = True,
740 logEnviron = False
741 ))
742
743 if rsync_src_url is not None:
744 factory.addStep(ShellCommand(
745 name = "sourcelist",
746 description = "Finding source archives to upload",
747 workdir = "build/sdk",
748 command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
749 haltOnFailure = True
750 ))
751
752 factory.addStep(ShellCommand(
753 name = "sourceupload",
754 description = "Uploading source archives",
755 workdir = "build/sdk",
756 command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
757 Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-avz", "dl/", "%s/" %(rsync_src_url)],
758 env={'RSYNC_PASSWORD': rsync_src_key},
759 haltOnFailure = False,
760 flunkOnFailure = False,
761 warnOnFailure = True,
762 logEnviron = False
763 ))
764
765 factory.addStep(ShellCommand(
766 name = "df",
767 description = "Reporting disk usage",
768 command=["df", "-h", "."],
769 env={'LC_ALL': 'C'},
770 haltOnFailure = False,
771 flunkOnFailure = False,
772 warnOnFailure = False,
773 alwaysRun = True
774 ))
775
776 factory.addStep(ShellCommand(
777 name = "du",
778 description = "Reporting estimated file space usage",
779 command=["du", "-sh", "."],
780 env={'LC_ALL': 'C'},
781 haltOnFailure = False,
782 flunkOnFailure = False,
783 warnOnFailure = False,
784 alwaysRun = True
785 ))
786
787 factory.addStep(ShellCommand(
788 name = "ccachestat",
789 description = "Reporting ccache stats",
790 command=["ccache", "-s"],
791 want_stderr = False,
792 haltOnFailure = False,
793 flunkOnFailure = False,
794 warnOnFailure = False,
795 alwaysRun = True,
796 ))
797
798 c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))
799
800 c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
801 force_factory.addStep(steps.Trigger(
802 name = "trigger_%s" % arch[0],
803 description = "Triggering %s build" % arch[0],
804 schedulerNames = [ "trigger_%s" % arch[0] ],
805 set_properties = { "reason": Property("reason") },
806 doStepIf = IsArchitectureSelected(arch[0])
807 ))
808
809 ####### STATUS arches
810
811 # 'status' is a list of Status arches. The results of each build will be
812 # pushed to these arches. buildbot/status/*.py has a variety to choose from,
813 # including web pages, email senders, and IRC bots.
814
815 if ini.has_option("phase2", "status_bind"):
816 c['www'] = {
817 'port': ini.get("phase2", "status_bind"),
818 'plugins': {
819 'waterfall_view': True,
820 'console_view': True,
821 'grid_view': True
822 }
823 }
824
825 if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
826 c['www']['auth'] = util.UserPasswordAuth([
827 (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
828 ])
829 c['www']['authz'] = util.Authz(
830 allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
831 roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
832 )
833
834 ####### PROJECT IDENTITY
835
836 # the 'title' string will appear at the top of this buildbot
837 # installation's html.WebStatus home page (linked to the
838 # 'titleURL') and is embedded in the title of the waterfall HTML page.
839
840 c['title'] = ini.get("general", "title")
841 c['titleURL'] = ini.get("general", "title_url")
842
843 # the 'buildbotURL' string should point to the location where the buildbot's
844 # internal web server (usually the html.WebStatus page) is visible. This
845 # typically uses the port number set in the Waterfall 'status' entry, but
846 # with an externally-visible host name which the buildbot cannot figure out
847 # without some help.
848
849 c['buildbotURL'] = buildbot_url
850
851 ####### DB URL
852
853 c['db'] = {
854 # This specifies what database buildbot uses to store its state. You can leave
855 # this at its default for all but the largest installations.
856 'db_url' : "sqlite:///state.sqlite",
857 }
858
859 c['buildbotNetUsageData'] = None