phase2: do not exceed nproc build concurrency
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import sys
7 import base64
8 import subprocess
9 import configparser
10
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
13
14 from twisted.internet import defer
15 from twisted.python import log
16
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
38
39
# Record this process's PID in twistd.pid (only if the daemon has not
# already written one) so helper scripts can locate the running master.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
43
# Buildmaster settings come from an INI file; the path can be overridden
# through the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# Externally visible URL of this buildmaster (mandatory option; ini.get
# raises if the [phase2] section or the key is missing).
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.  Buildbot looks up the name
# 'BuildmasterConfig' in this file, so both bindings must stay.
c = BuildmasterConfig = {}
55
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Defaults; each may be overridden from config.ini below.
worker_port = 9990
persistent = False
tree_expire = 0
git_ssh = False
git_ssh_key = None

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    # SSH cloning is useless without a key; force it off.
    git_ssh = False

c['workers'] = []
max_builds = dict()  # worker name -> maximum concurrent builds on it

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            # A worker limited to a single concurrent build can safely
            # share one work directory between all builders.
            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # BUG FIX: compare this worker's build limit, not the dict
                # object itself — the old `max_builds != 1` was always True,
                # so the sanity check fired for every shared_wd worker.
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
109
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds: merge pending build requests for the same builder
c['collapseRequests'] = True

# Reduce amount of backlog data: discard step logs older than 3 days,
# running the cleanup daily at 06:00.
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
123
####### CHANGESOURCES

# Master-side working directory (holds the source.git checkout and the
# temporary signing directory).
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync destination for built packages; the password is passed via the
# environment and kept out of step logs.
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Optional rsync destination for mirroring downloaded source archives.
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

# rsync location of the phase-1 SDK archives the phase-2 builders build in.
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# Main source repository and the branch to track.
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

# usign secret key (base64) from which the worker-side public key is
# derived; the comment defaults to one built from the branch name.
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
167
# find arches

arches = [ ]      # full records, one list of fields per architecture
archnames = [ ]   # just the architecture names (used as builder names)

# Make sure a current source tree is available for the helper scripts.
if os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')
else:
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)

# dump-target-info.pl prints one whitespace-separated record per arch.
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

for rawline in findarches.stdout:
    fields = rawline.decode().strip().split()
    arches.append(fields)
    archnames.append(fields[0])
188
189
# find feeds
feeds = []              # parsed src-git feed entries (split fields)
feedbranches = dict()   # feed repository URL -> branch being tracked

c['change_source'] = []
195
def parse_feed_entry(line):
    """Register one feeds.conf line as a change source.

    Only 'src-git' (which also matches 'src-git-full') entries are used:
    the entry is recorded in `feeds`, its branch in `feedbranches`, and a
    GitPoller watching that branch is appended to c['change_source'].
    Blank lines and comment lines are ignored.
    """
    parts = line.strip().split()
    # Robustness fix: skip empty and '#' comment lines instead of raising
    # IndexError on parts[0] (comment lines never matched before either).
    if not parts or parts[0].startswith("#"):
        return
    if parts[0].startswith("src-git"):
        feeds.append(parts)
        # The URL may carry a branch after ';' (e.g. ...foo.git;openwrt-21.02)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
204
# The base feed (main packages repository) is reported by the SDK
# makefile; only the first line of the make output is consulted.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

# The remaining feeds come from the tree's default feed configuration.
with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

# Without any change sources the master would never build anything; fail fast.
if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    sys.exit(-1)
219
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []
# Build every architecture when a tracked feed branch changes; commits on
# other branches of the same repository are filtered out.
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

# Manual trigger via the web UI; the actual per-arch builds are started
# through the Trigger steps attached to the "00_force_build" builder.
c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))
275
276 ####### BUILDERS
277
278 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
279 # what steps, and which workers can execute them. Note that any particular build will
280 # only take place on one worker.
281
def GetDirectorySuffix(props):
    """Map the 'release_version' build property to a '-MM.mm' directory
    suffix (e.g. '21.02.3' -> '-21.02'); return '' when the property is
    absent or does not look like a release version."""
    if not props.hasProperty("release_version"):
        return ""
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    match = verpat.match(props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
289
def GetNumJobs(props):
    """Compute the `make -j` value for a build on this worker.

    Splits the worker's CPU count (the 'nproc' build property) evenly
    across its allowed concurrent builds so that parallel builds on one
    worker never exceed nproc in total.  Falls back to one job when
    either property is missing.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # Integer floor division: true division returned a float, and a
        # worker allowing more concurrent builds than it has CPUs would
        # yield 0 jobs ("make -j0" is rejected by make) — hence the clamp.
        return max(1, int(props["nproc"]) // max_builds[props["workername"]])
    else:
        return 1
295
def GetCwd(props):
    """Best-effort absolute build location for this worker: prefer the
    'builddir' property, then 'workdir', then fall back to '/'."""
    for key in ("builddir", "workdir"):
        if props.hasProperty(key):
            return props[key]
    return "/"
303
def IsArchitectureSelected(target):
    """Build a doStepIf predicate that is True when the force-build
    'options' property selects `target` (or "all"), and True whenever
    the property is missing or not a dict (normal scheduled builds)."""
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True
        # Strict dict check (not isinstance) preserved from the original.
        if type(options) is dict:
            selected = options.get("architecture", "all")
            return selected in ("all", target)
        return True

    return CheckArchitectureProperty
318
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public-key file contents from a base64 secret key.

    Returns the two-line public key (comment line with 'secret key'
    rewritten to 'public key', then the base64 key data), or None when
    the input is not valid base64.  The byte slices pick the public
    portion out of the decoded secret key — offsets per the usign key
    layout (NOTE(review): confirm against usign sources).
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # BUG FIX: b64encode returns bytes; formatting it directly embedded a
    # literal "b'...'" repr in the generated key file.  Decode to ASCII.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode('ascii'))
327
def IsSharedWorkdir(step):
    """doStepIf helper: True when the worker advertises a shared workdir."""
    shared = step.getProperty("shared_wd")
    return bool(shared)
330
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # Newest completed, non-skipped build request for this builder.
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        # No completed request at all -> implicit None via bare return.
        return

    complete_at = completed[0]['complete_at']

    # Most recently *started* build for the same builder; its completion
    # time may be newer than the newest request's complete_at.
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        # Prefer the build's completion time when it is the newer one.
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
368
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        # Either an active or an old-style in-flight build counts.
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # Builders that never completed sort first (oldest possible time)...
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # ...and builders that are currently building sort last.
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # FIX: named 'infos' instead of 'results' to stop shadowing the
    # imported buildbot.process.results module inside this function.
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for complete_at, bldr in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

    return [bldr for complete_at, bldr in infos]
405
# Hand builder scheduling priority to the last-completed-first policy above.
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

# NOTE(review): dlLock is not referenced anywhere in this file view —
# possibly consumed elsewhere; confirm before removing.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [worker.workername for worker in c['workers']]

force_factory = BuildFactory()

# Pseudo-builder whose only job is to hold the Trigger steps added per
# architecture below; users force-build this one from the web UI.
c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
422
# One builder per package architecture.  Each `arch` record comes from
# dump-target-info.pl: arch[0] is the architecture name and arch[1] a
# representative "target/subtarget" (used to pick the SDK to download).
for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores (feeds GetNumJobs via the 'nproc' property)
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    # Non-persistent workers get a full wipe of previous builds plus a
    # cleanup of the current work area before every build.
    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed (persistent workers only)
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # Fetch and unpack the phase-1 SDK this arch builds inside.
    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    # Checksum-based sync keeps timestamps of unchanged SDK files intact.
    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # Drop absolute symlinks pointing outside /bin and /usr/bin — they
    # would reference paths from the machine that built the SDK.
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # Tiny makefile used to extract VERSION_NUMBER from the SDK tree.
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key (public key real, private key/cert are dummies:
    # actual signing happens on the master, see the sign* steps below)
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    # Share one download cache per worker across all builders.
    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    # Use full git clones for feeds so exact revisions can be pinned.
    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    # Optional SSH-based cloning with a deploy key from the master.
    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf to use SSH cloning",
            workdir = "build/sdk",
            command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    # Remove the SSH-rewritten feeds.conf so the key is not needed later.
    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    # Main compile: -j derived from nproc / concurrent builds (GetNumJobs),
    # errors tolerated per package ("IGNORE_ERRORS=n m y"), signing disabled
    # worker-side (CONFIG_SIGNED_PACKAGES=) since the master signs.
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # Signing round-trip: pack Packages indexes, upload to the master,
    # sign there with signall.sh, download and unpack the signatures.
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # Upload built packages: create the per-arch directory first, then
    # sync its contents with delete + checksum + delayed updates.
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # Collect and upload per-package failure logs (best effort only).
    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    # Optionally mirror newly downloaded source archives (those newer
    # than the SDK archive) to the configured source rsync target.
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))

    # Informational disk usage reports; always run, never fail the build.
    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    # Per-arch triggerable scheduler plus a matching Trigger step on the
    # force builder, gated on the architecture selected in the force form.
    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))
800
####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# Web UI is only enabled when a bind address/port is configured.
if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    # Optional basic auth; the configured user gets the "admins" role and
    # with it access to all control endpoints.
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )
825
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

# Opt out of sending anonymous usage statistics.
c['buildbotNetUsageData'] = None