# -*- python -*-
# ex: set syntax=python:

import os
import re
import sys
import base64
import subprocess
import configparser

from dateutil.tz import tzutc
from datetime import datetime, timedelta

from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.data import resultspec
from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Property
from buildbot.process.properties import Interpolate
from buildbot.process import properties
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.shell import ShellCommand
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker


if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
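
# Illustrative sketch of the config.ini layout this file expects. The section
# and option names below are the ones queried via ini.get()/ini.has_option()
# further down; all of the values are made-up examples:
#
#   [general]
#   title = Example Project
#   title_url = http://buildbot.example.org/
#   workdir = /data/buildbot
#
#   [phase2]
#   buildbot_url = http://buildbot.example.org/
#   port = 9990
#   persistent = false
#
#   [repo]
#   url = https://git.example.org/source.git
#   branch = master
#
#   [rsync]
#   binary_url = user@example.org::bin
#   binary_password = secret
#
#   [worker 1]
#   phase = 2
#   name = example-worker-1
#   password = secret
#   shared_wd = true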

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9990
persistent = False

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

c['workers'] = []

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': True }

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")

            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))

# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
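
# (JanitorConfigurator runs a daily housekeeping task; with these settings it
# deletes build-step logs older than three days, every day at 06:00.)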

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

rsync_defopts = ["-4", "-v", "--timeout=120"]

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")


# find arches
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])
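
# Each output line of dump-target-info.pl is expected to look roughly like
# (illustrative, not verbatim output):
#   aarch64_cortex-a53 bcm27xx/bcm2710 mvebu/cortexa53
# i.e. an architecture name followed by target/subtarget pairs, so at[0] is
# the arch name and at[1] the first target/subtarget it occurs in.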


# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    parts = line.strip().split()
    if parts[0].startswith("src-git"):
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
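
# A feed entry is a feeds.conf.default-style line such as (example):
#   src-git packages https://git.openwrt.org/feed/packages.git;openwrt-21.02
# where the optional ';branch' suffix on the URL selects the branch to poll;
# without it the feed's 'master' branch is assumed.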

make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    sys.exit(-1)

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

@properties.renderer
def GetDirectorySuffix(props):
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""
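
# e.g. release_version values like "21.02.3", "21.02.0-rc1" or "21.02-SNAPSHOT"
# all yield "-21.02", while a non-matching value (such as a bare snapshot
# revision) yields "" and the uploads go to the unsuffixed directories.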

@properties.renderer
def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"

def IsArchitectureSelected(target):
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected_arch = options.get("architecture", "all")
                if selected_arch != "all" and selected_arch != target:
                    return False
        except KeyError:
            pass

        return True

    return CheckArchitectureProperty

def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # b64encode() returns bytes; decode so the formatted key file stays text
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
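
# Assuming the standard usign/signify secret-key layout, the slices above pick
# out the algorithm tag (bytes 0-1), the key number (bytes 32-39) and the
# public half of the Ed25519 keypair (bytes 72 onward), which together are
# exactly what a usign public-key file encodes.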

def IsSharedWorkdir(step):
    return bool(step.getProperty("shared_wd"))

@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True, which unfortunately marks all previous
    requests as complete when a new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at

@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns the list of builders sorted by the timestamp of their last
    completed (and not skipped) build, oldest first, so long-idle builders
    are scheduled ahead of recently built ones.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    for r in results:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))
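
    # (The sharedwd step swaps the per-builder work directory for a symlink
    # into a single ../shared-workdir, so every arch builder on a worker
    # reuses one SDK tree and its caches instead of keeping its own copy.)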

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetPropertyFromCommand(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))
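
    # (getversion.mk simply echoes the SDK's VERSION_NUMBER, e.g. "21.02.3"
    # on a release branch; the captured release_version property feeds the
    # GetDirectorySuffix renderer used for the upload paths below.)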

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    factory.addStep(ShellCommand(
        name = "checksums",
        description = "Calculating checksums",
        descriptionDone="Checksums calculated",
        workdir = "build/sdk",
        command = "cd bin/packages/%s; " %(arch[0]) + "find . -type f -not -name 'sha256sums' -printf \"%P\\n\" | sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | sed -ne 's!^\\(.*\\) \\(.*\\)$!\\1 *\\2!p' > sha256sums",
        haltOnFailure = True
    ))
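
    # (sha256sums ends up with the usual "<hash> *<relative path>" lines; the
    # remote copy for this arch is fetched below so the two can be compared.)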

    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # download the remote sha256sums for this arch to 'arch-sha256sums'
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for arch",
        command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        logEnviron = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    ))

    factory.addStep(FileDownload(
        name="dlrsync.sh",
        mastersrc = scripts_dir + "/rsync.sh",
        workerdest = "../rsync.sh",
        mode = 0o755
    ))

    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = "sha2rsync.pl",
        workerdest = "../sha2rsync.pl",
        mode = 0o755,
    ))
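
    # (sha2rsync.pl presumably compares the remote arch-sha256sums with the
    # freshly generated bin/packages/<arch>/sha256sums and writes the paths
    # that differ to 'rsynclist', so only new or changed files get uploaded.)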

    factory.addStep(ShellCommand(
        name = "buildlist",
        description = "Building list of files to upload",
        workdir = "build/sdk",
        command = ["../../../sha2rsync.pl", "../../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
        haltOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageprune",
        description = "Pruning package files",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
                Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True,
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))

####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

c['buildbotNetUsageData'] = None