# -*- python -*-
# ex: set syntax=python:

import os
import re
import sys
import base64
import subprocess
import configparser

from dateutil.tz import tzutc
from datetime import datetime, timedelta

from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.data import resultspec
from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Property
from buildbot.process.properties import Interpolate
from buildbot.process import properties
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.shell import ShellCommand
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker


if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element
# is a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.
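#
# A phase 2 worker is declared in config.ini with a section of roughly the
# following shape (section name and values are illustrative placeholders,
# not real credentials); the options mirror what the parsing loop below reads:
#
#   [worker 1]
#   phase = 2
#   name = example-worker-01
#   password = secret
#   builds = 3
#   shared_wd = no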

worker_port = 9990
persistent = False
tree_expire = 0
git_ssh = False
git_ssh_key = None

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    git_ssh = False
c['workers'] = []
max_builds = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
                ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # compare this worker's build count, not the max_builds dict itself
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))

# 'protocols' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")
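
# For reference, the [rsync] options read above map onto a config.ini section
# roughly like this (URLs and passwords are placeholders):
#
#   [rsync]
#   binary_url = user@example.org::packages-upload
#   binary_password = secret
#   sdk_url = user@example.org::sdk-download
#   sdk_password = secret
#   sdk_pattern = openwrt-sdk-*.tar.xz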

rsync_defopts = ["-4", "-v", "--timeout=120"]

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")

# find arches
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])

# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    parts = line.strip().split()
    if parts[0].startswith("src-git"):
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
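
# parse_feed_entry() consumes lines in feeds.conf format, i.e.
# "<method> <name> <url>[;<branch>]", for example:
#
#   src-git packages https://git.openwrt.org/feed/packages.git;master
#
# Entries whose method does not start with "src-git" are skipped; each
# accepted feed gets its own GitPoller change source.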

make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    sys.exit(-1)

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In
# this case, a change on a feed's configured branch kicks off a build on every
# architecture builder.

c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform
# a build: what steps, and which workers can execute them. Note that any
# particular build will only take place on one worker.

@properties.renderer
def GetDirectorySuffix(props):
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""
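
# Illustrative mapping: a release_version of "21.02.3", "21.02.0-rc4" or
# "21.02-SNAPSHOT" yields "-21.02", so uploads land in versioned
# packages-21.02/ and faillogs-21.02/ trees; anything unparsable yields ""
# and falls back to the unversioned directories.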

@properties.renderer
def GetNumJobs(props):
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # integer division so the result stays a valid make -j argument
        return str(max(1, int(props["nproc"]) // max_builds[props["workername"]]))
    else:
        return "1"
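
# e.g. (illustrative) nproc=8 on a worker with max_builds=2 yields "-j4";
# the floor of 1 guards against a zero job count on small workers.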

@properties.renderer
def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"

def IsArchitectureSelected(target):
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected_arch = options.get("architecture", "all")
                if selected_arch != "all" and selected_arch != target:
                    return False
        except KeyError:
            pass

        return True

    return CheckArchitectureProperty

def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

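    # The slicing below assumes the usign/signify secret key layout:
    # pkalg(2) + kdfalg(2) + kdfrounds(4) + salt(16) + checksum(8) +
    # keynum(8) + seckey(64); the corresponding public key is
    # pkalg + keynum + the final 32 bytes (the Ed25519 public half).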
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())

def IsSharedWorkdir(step):
    return bool(step.getProperty("shared_wd"))

@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True, which unfortunately marks all previous
    requests as complete when a new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at

@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns the list of builders sorted by the timestamp of their last
    completed, not-skipped build, so the least recently built architectures
    are scheduled first.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    for r in results:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

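    # write a minimal helper makefile whose sole target prints the SDK's
    # VERSION_NUMBER, so it can be captured into the release_version property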
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetPropertyFromCommand(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf",
            workdir = "build/sdk",
            command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

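    # Signing round-trips through the master: the worker packs the package
    # indexes, uploads the archive, the master signs it via signall.sh, and
    # the signed archive is downloaded and unpacked back in place.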
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync"] + rsync_defopts + ["--files-from=sourcelist", "--progress", "--checksum", "--delay-updates",
                Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True,
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))

####### STATUS TARGETS

# The 'www' setting configures the built-in web status interface: build
# results are served on the configured port, with the waterfall, console
# and grid views enabled as plugins below.

if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot installation's
# web UI home page (linked to the 'titleURL') and is embedded in the page
# titles.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server is visible, i.e. an externally-reachable URL with a
# host name which the buildbot cannot figure out without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can
    # leave this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

c['buildbotNetUsageData'] = None