# source: buildbot.git / phase2 / master.cfg
# commit: 23a67f7324e3595170f91f71f4d2de48bab10657
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import sys
7 import base64
8 import subprocess
9 import configparser
10
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
13
14 from twisted.internet import defer
15 from twisted.python import log
16
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import Interpolate
29 from buildbot.process import properties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetPropertyFromCommand
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
39
40
# Record our PID so external tooling can find the running master process.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

# Load the INI configuration; BUILDMASTER_CONFIG may override the default path.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9990
persistent = False

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

c['workers'] = []

# Register every "[worker ...]" INI section that declares phase 2.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            # Workers default to a shared work directory unless configured off.
            sl_props = { 'shared_wd': True }

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")

            c['workers'].append(Worker(name, password, max_builds = 1, properties = sl_props))

# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
100
####### CHANGESOURCES

# Master-side working directory and the location of helper scripts.
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync endpoint for uploading built packages (mandatory).
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync endpoint for mirroring downloaded source archives.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# Optional rsync endpoint from which SDK archives are fetched.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.*"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Options passed to every rsync invocation below.
rsync_defopts = ["-4", "-v", "--timeout=120"]

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign signing key (base64 secret key) and the comment line embedded into
# the derived public key; default comment is built from the branch name.
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
146
# find arches
arches = []
archnames = []

source_git = work_dir + '/source.git'

# Clone the source tree on first run, otherwise fast-forward the checkout.
if os.path.isdir(source_git):
    subprocess.call(["git", "pull"], cwd = source_git)
else:
    subprocess.call(["git", "clone", "--depth=1", "--branch=" + repo_branch, repo_url, source_git])

os.makedirs(source_git + '/tmp', exist_ok=True)

# Ask the source tree which package architectures exist; the helper prints
# one architecture per line, first token being the architecture name.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = source_git)

for raw in findarches.stdout:
    fields = raw.decode().strip().split()
    arches.append(fields)
    archnames.append(fields[0])
167
168
169 # find feeds
170 feeds = []
171 feedbranches = dict()
172
173 c['change_source'] = []
174
def parse_feed_entry(line):
    """Register one feeds.conf entry.

    Only "src-git"-prefixed entries are considered. The entry's URL may
    carry a ";branch" suffix (default 'master'). Appends the raw entry to
    the module-level 'feeds', records the branch in 'feedbranches' and
    attaches a GitPoller change source for the repository.
    """
    tokens = line.strip().split()
    if not tokens[0].startswith("src-git"):
        return
    feeds.append(tokens)
    spec = tokens[2].strip().split(';')
    if len(spec) > 1:
        branch = spec[1]
    else:
        branch = 'master'
    feedbranches[spec[0]] = branch
    c['change_source'].append(GitPoller(spec[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), tokens[1]), pollinterval=300))
183
# Resolve the SDK's base feed via the build system itself; TOPDIR points
# make at the checked-out source tree.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

# Only the first output line carries the feed specification.
line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

# Add every feed listed in the tree's default feed configuration.
with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

# Without at least one change source the master cannot react to anything.
if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    sys.exit(-1)
198
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []
# Build every architecture when a watched feed changes; only commits on the
# branch recorded for that repository (see feedbranches) are accepted.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger via the 00_force_build builder, which fans out through the
# per-arch Triggerable schedulers appended in the builder loop below.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    # Codebase fields are fixed/empty: the force form drives builds only.
    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    # "options.architecture" selects either all builders or a single one;
    # consumed by IsArchitectureSelected().
    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))
254
255 ####### BUILDERS
256
257 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
258 # what steps, and which workers can execute them. Note that any particular build will
259 # only take place on one worker.
260
@properties.renderer
def GetDirectorySuffix(props):
    """Render "-MM.mm" for release builds, or "" otherwise.

    Matches the "release_version" build property against the version
    pattern (e.g. "21.02.3", "22.03.0-rc1", "23.05-SNAPSHOT") and keeps
    only the zero-padded major.minor part as a directory suffix.
    """
    pattern = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if not props.hasProperty("release_version"):
        return ""
    match = pattern.match(props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
269
@properties.renderer
def GetCwd(props):
    """Best-effort working directory of the current build.

    Prefers the "builddir" property, falls back to "workdir", and finally
    to "/" when neither property is set.
    """
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
278
def IsArchitectureSelected(target):
    """Build a doStepIf predicate that is true when 'target' should build.

    The force scheduler stores its selection in the nested "options"
    property; a build proceeds when that dict selects "all" or exactly
    'target'. A missing or non-dict property means "build everything".
    """
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True

        if isinstance(options, dict):
            chosen = options.get("architecture", "all")
            if chosen not in ("all", target):
                return False

        return True

    return CheckArchitectureProperty
293
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file content from a base64 secret key.

    The decoded secret key is sliced at the offsets the usign key file
    format prescribes (bytes 0..2, 32..40 and 72..) and re-encoded as the
    public key blob, under a comment line with "secret key" rewritten to
    "public key".

    Returns the two-line public key file as a str, or None when 'seckey'
    is not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # b64encode() returns bytes; decode to str so the formatted result does
    # not embed a literal "b'...'" representation in the key file.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
302
def IsSharedWorkdir(step):
    """doStepIf predicate: whether this worker uses a shared work directory."""
    shared = step.getProperty("shared_wd")
    if shared:
        return True
    return False
305
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # Also consult the most recently started build of this builder: it may
    # have completed later than the newest completed request.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
343
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    Least-recently-finished builders come first; builders with no history
    sort to the very front and currently-building ones to the very back.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Any active (or pre-restart) build pushes the builder to the back.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # No completed build yet: oldest possible timestamp sorts it first.
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # Busy builders get the newest possible timestamp and sort last.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # Named 'infos' (not 'results') to avoid shadowing the imported
    # buildbot.process.results module.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for r in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in infos]
380
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

# Lock used to serialize download-heavy steps per worker.
dlLock = locks.WorkerLock("worker_dl")

# All configured phase-2 workers may run any builder.
workerNames = [worker.workername for worker in c['workers']]

force_factory = BuildFactory()

# Placeholder builder targeted by the force scheduler; its factory is
# populated with one Trigger step per architecture in the loop below.
c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
397
# One builder per package architecture: download the matching SDK, build all
# feed packages inside it, sign the indexes on the master and upload packages
# plus failure logs via rsync.
for arch in arches:
    # arch[0] is the architecture name; arch[1] looks like "target/subtarget"
    # and selects the SDK archive path below (assumption from the rsync URL
    # construction -- confirm against dump-target-info.pl output).
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    # Non-persistent workers start from scratch every build.
    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    # --checksum so only changed SDK files touch the (possibly persistent)
    # sdk/ tree, keeping ccache and feeds state warm.
    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync " + (" ").join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # Remove absolute symlinks in the SDK host bin dir that do not point
    # into /bin or /usr/bin (stale paths from the SDK build host).
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # Helper makefile that prints the SDK's VERSION_NUMBER.
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetPropertyFromCommand(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        # Real signing happens on the master; the worker gets placeholders.
        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    # Share one download cache per worker across all builders.
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    # Drop stale failure logs so this run's faillogs are authoritative.
    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    # Build everything; IGNORE_ERRORS keeps going past per-package failures
    # and BUILD_LOG captures per-package logs for the faillog upload below.
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", Interpolate("-j%(prop:nproc:-1)s"), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    factory.addStep(ShellCommand(
        name = "checksums",
        description = "Calculating checksums",
        descriptionDone="Checksums calculated",
        workdir = "build/sdk",
        command = "cd bin/packages/%s; " %(arch[0])
                + "find . -type f -not -name 'sha256sums' -printf \"%P\n\" | "
                + "sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | "
                + r"sed -ne 's!^\(.*\) \(.*\)$!\1 *\2!p' > sha256sums",
        haltOnFailure = True
    ))

    # Sign the generated Packages indexes on the master where the real keys
    # live: pack -> upload -> sign -> download -> unpack.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # download remote sha256sums to 'target-sha256sums'
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for arch",
        command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        logEnviron = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    ))

    factory.addStep(FileDownload(
        name="dlrsync.sh",
        mastersrc = scripts_dir + "/rsync.sh",
        workerdest = "../rsync.sh",
        mode = 0o755
    ))

    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = scripts_dir + "/sha2rsync.pl",
        workerdest = "../sha2rsync.pl",
        mode = 0o755,
    ))

    # Diff local vs remote checksums so only changed files get uploaded.
    factory.addStep(ShellCommand(
        name = "buildlist",
        description = "Building list of files to upload",
        workdir = "build/sdk",
        command = ["../../sha2rsync.pl", "../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
        haltOnFailure = True,
    ))

    # Create the remote per-arch directory without touching its contents.
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # Second pass removes remote files that no longer exist locally.
    factory.addStep(ShellCommand(
        name = "packageprune",
        description = "Pruning package files",
        workdir = "build/sdk",
        command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # List the log directories of every package that failed to build.
    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    # Optionally mirror freshly downloaded source archives upstream.
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
                Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))

    # Diagnostics: always run and never affect the build status.
    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True,
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    # Per-arch Triggerable plus a Trigger step on the force builder, gated
    # on the architecture chosen in the force-build form.
    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))
797
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Serve the web UI only when a bind address/port is configured.
if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    # Optional basic auth; the configured user is granted the "admins" role
    # and with it access to all control endpoints.
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )
822
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# None disables buildbot's usage-data reporting.
c['buildbotNetUsageData'] = None