phase1,phase2: implement round robin builds
[buildbot.git] / phase2 / master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import base64
import random
import subprocess
import configparser

from dateutil.tz import tzutc
from datetime import datetime, timedelta

from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.data import resultspec
from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Property
from buildbot.process.properties import WithProperties
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetProperty
from buildbot.steps.shell import ShellCommand
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker


ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9990
persistent = False
other_builds = 0
tree_expire = 0
git_ssh = False
git_ssh_key = None

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
    other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    git_ssh = False

c['workers'] = []
max_builds = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))

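# Illustrative config.ini fragment for the options read above and below
# (section/option names follow the ini lookups in this file, the values are
# made-up examples):
#
#   [phase2]
#   buildbot_url = http://buildbot.example.org/
#   port = 9990
#
#   [worker 1]
#   phase = 2
#   name = worker-01
#   password = secret
#   builds = 3
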
# 'protocols' defines the TCP port to listen on for connections from workers.
# The port must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")


# find arches
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

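# Each line printed by dump-target-info.pl is split on whitespace; the first
# field becomes the architecture (and builder) name, the second a
# representative "target/subtarget" used later to fetch a matching SDK.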
while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])


# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []

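# Feed entries are expected in feeds.conf format, e.g. (illustrative):
#   src-git packages https://git.openwrt.org/feed/packages.git;openwrt-21.02
# Only src-git entries are registered; the optional ";branch" suffix selects
# the branch to poll, defaulting to "master".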
def parse_feed_entry(line):
    parts = line.strip().split()
    if parts[0] == "src-git":
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    # readline() returns bytes; decode so the src-git comparison above matches
    parse_feed_entry(line.decode())

with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
    for line in f:
        parse_feed_entry(line)


####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, kick off builds on all architecture builders whenever a feed repository
# changes on its configured branch.

c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

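# GetDirectorySuffix() turns the SDK's release version into the "-YY.MM"
# directory suffix used for uploads; e.g. (illustrative) "21.02.3" or
# "21.02-SNAPSHOT" both yield "-21.02", while builds without a
# release_version property yield "".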
def GetDirectorySuffix(props):
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""

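# GetNumJobs() splits the worker's CPU count between the builds that may run
# concurrently on it; e.g. (made-up numbers) nproc=8, max_builds=2 and
# other_builds=0 yields -j5.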
def GetNumJobs(props):
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        return int(int(props["nproc"]) / (max_builds[props["workername"]] + other_builds)) + 1
    else:
        return 1

def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"

def IsArchitectureSelected(target):
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected_arch = options.get("architecture", "all")
                if selected_arch != "all" and selected_arch != target:
                    return False
        except KeyError:
            pass

        return True

    return CheckArchitectureProperty

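# UsignSec2Pub() derives the corresponding public key from the base64-encoded
# usign secret key, so only the public half ever reaches the workers while the
# actual signing happens on the master. The byte ranges below (0:2 algorithm
# tag, 32:40 key number, 72: embedded public key) assume the usual
# signify/usign secret key layout.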
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())

def IsSharedWorkdir(step):
    return bool(step.getProperty("shared_wd"))

@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True, which unfortunately marks all previous
    requests as complete when a new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    return completed[0]['complete_at']

@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns the list of builders sorted by the timestamp of their most
    recent completed, non-skipped build, oldest first, so the least recently
    built builder is scheduled first.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    for r in results:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

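# Registering the custom prioritizer gives round-robin behaviour across the
# per-arch builders: the builder whose last non-skipped build finished longest
# ago is served first, never-built builders sort before everything else, and
# builders that are currently building are pushed to the end of the list.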
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
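    # With a shared workdir the per-builder build directory is replaced by a
    # symlink to a single ../shared-workdir, so state such as the unpacked SDK
    # and ccache can be reused across the per-arch builders on that worker; the
    # step only runs when the worker's shared_wd property is set (IsSharedWorkdir).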
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf to use SSH cloning",
            workdir = "build/sdk",
            command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

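    # Package indexes are signed on the master: the worker tars up the Packages
    # files, uploads the tarball, the master signs it via scripts/signall.sh,
    # and the signed result is downloaded and unpacked back into bin/packages/.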
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            logEnviron = False
        ))

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        alwaysRun = True
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))

####### STATUS TARGETS

# 'status' is a list of Status Targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

c['buildbotNetUsageData'] = None