Bump buildbot version to latest stable release 3.8.0
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import sys
7 import base64
8 import random
9 import subprocess
10 import configparser
11
12 from dateutil.tz import tzutc
13 from datetime import datetime, timedelta
14
15 from twisted.internet import defer
16 from twisted.python import log
17
18 from buildbot import locks
19 from buildbot.data import resultspec
20 from buildbot.changes import filter
21 from buildbot.changes.gitpoller import GitPoller
22 from buildbot.config import BuilderConfig
23 from buildbot.plugins import schedulers
24 from buildbot.plugins import steps
25 from buildbot.plugins import util
26 from buildbot.process import results
27 from buildbot.process.factory import BuildFactory
28 from buildbot.process.properties import Property
29 from buildbot.process.properties import WithProperties
30 from buildbot.schedulers.basic import SingleBranchScheduler
31 from buildbot.schedulers.forcesched import ForceScheduler
32 from buildbot.steps.master import MasterShellCommand
33 from buildbot.steps.shell import SetProperty
34 from buildbot.steps.shell import ShellCommand
35 from buildbot.steps.transfer import FileDownload
36 from buildbot.steps.transfer import FileUpload
37 from buildbot.steps.transfer import StringDownload
38 from buildbot.worker import Worker
39
40
# Record our pid for the init scripts if twistd has not done so already.
if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write(str(os.getpid()))
44
# Load the shared buildmaster configuration (path overridable via env).
ini = configparser.ConfigParser()
ini.read(os.environ.get("BUILDMASTER_CONFIG", "./config.ini"))

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
BuildmasterConfig = {}
c = BuildmasterConfig
56
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Optional phase2 settings with their defaults.
worker_port = ini.get("phase2", "port") if ini.has_option("phase2", "port") else 9990
persistent = ini.getboolean("phase2", "persistent") if ini.has_option("phase2", "persistent") else False
other_builds = ini.getint("phase2", "other_builds") if ini.has_option("phase2", "other_builds") else 0
tree_expire = ini.getint("phase2", "expire") if ini.has_option("phase2", "expire") else 0

git_ssh = ini.getboolean("general", "git_ssh") if ini.has_option("general", "git_ssh") else False

# SSH-based cloning is only usable when a key is configured as well;
# without a key, force git_ssh off.
if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    git_ssh_key = None
    git_ssh = False
89
c['workers'] = []
max_builds = dict()

# Register every "[worker ...]" section of the config that belongs to phase 2.
for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            # a single-slot worker can always share its work directory
            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                # BUGFIX: compare this worker's slot count, not the dict itself
                # (the old "max_builds != 1" was always True, rejecting every
                # worker that explicitly enabled shared_wd)
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
114
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = dict(pb=dict(port=worker_port))

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
janitor = util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)
c['configurators'] = [janitor]
128
####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# Source mirror upload is optional; url and password come as a pair.
if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")
else:
    rsync_src_url = None
    rsync_src_key = None

# SDK download location and archive pattern, all optional.
rsync_sdk_url = ini.get("rsync", "sdk_url") if ini.has_option("rsync", "sdk_url") else None
rsync_sdk_key = ini.get("rsync", "sdk_password") if ini.has_option("rsync", "sdk_password") else None
rsync_sdk_pat = ini.get("rsync", "sdk_pattern") if ini.has_option("rsync", "sdk_pattern") else "openwrt-sdk-*.tar.xz"

repo_url = ini.get("repo", "url")
repo_branch = ini.get("repo", "branch") if ini.has_option("repo", "branch") else "master"

# usign key and comment; the default comment is derived from the branch name.
usign_key = ini.get("usign", "key") if ini.has_option("usign", "key") else None
if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")
else:
    usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"
172
# find arches
arches = [ ]
archnames = [ ]

# Ensure a checkout of the source tree exists so we can interrogate it.
if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# Each output line starts with the architecture name followed by further
# whitespace-separated fields; keep the full record and the name.
for rawline in findarches.stdout:
    fields = rawline.decode().strip().split()
    arches.append(fields)
    archnames.append(fields[0])
193
194
# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    """Parse one feeds.conf line; register src-git feeds and a GitPoller each.

    The entry format is "src-git[-full] <name> <url>[;<branch>]"; the branch
    defaults to 'master' when not given.
    """
    parts = line.strip().split()
    # ROBUSTNESS: skip blank and comment lines instead of crashing with an
    # IndexError on parts[0]
    if not parts or parts[0].startswith("#"):
        return
    if parts[0].startswith("src-git"):
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))

# The base feed is reported by the SDK makefile itself.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    sys.exit(-1)
224
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

# Only react to commits landing on the branch we track for that feed repo.
branch_filter = filter.ChangeFilter(
    filter_fn = lambda change: change.branch == feedbranches[change.repository])

c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = branch_filter,
    treeStableTimer = 60,
    builderNames = archnames))

# Fixed codebase: the force scheduler never overrides repo/branch/revision.
force_codebases = [
    util.CodebaseParameter(
        "",
        label = "Repository",
        branch = util.FixedParameter(name = "branch", default = ""),
        revision = util.FixedParameter(name = "revision", default = ""),
        repository = util.FixedParameter(name = "repository", default = ""),
        project = util.FixedParameter(name = "project", default = "")
    )
]

force_reason = util.StringParameter(
    name = "reason",
    label = "Reason",
    default = "Trigger build",
    required = True,
    size = 80
)

# Let the user restrict a forced build to a single architecture.
force_properties = [
    util.NestedParameter(
        name="options",
        label="Build Options",
        layout="vertical",
        fields=[
            util.ChoiceStringParameter(
                name = "architecture",
                label = "Build architecture",
                default = "all",
                choices = [ "all" ] + archnames
            )
        ]
    )
]

c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],
    codebases = force_codebases,
    reason = force_reason,
    properties = force_properties
))
280
281 ####### BUILDERS
282
283 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
284 # what steps, and which workers can execute them. Note that any particular build will
285 # only take place on one worker.
286
def GetDirectorySuffix(props):
    """Map a release_version property such as "21.02.3", "21.02.3-rc2" or
    "21.02-SNAPSHOT" to an upload directory suffix like "-21.02".
    Returns "" when the property is absent or does not parse."""
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if not props.hasProperty("release_version"):
        return ""
    match = verpat.match(props["release_version"])
    if match is None:
        return ""
    return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
294
def GetNumJobs(props):
    """Compute the make job count (-j) for the current worker.

    Divides the worker's reported CPU count by the number of builds that may
    run there concurrently (its own build slots plus the configured
    other_builds), plus one. Falls back to 1 when the workername/nproc
    properties are unavailable.
    """
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # integer division: make's -j argument expects an int, not a float
        return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
    else:
        return 1
300
def GetCwd(props):
    """Best-effort working directory of the current build: prefer the build
    directory, then the worker's work directory, else fall back to "/"."""
    for prop in ("builddir", "workdir"):
        if props.hasProperty(prop):
            return props[prop]
    return "/"
308
def IsArchitectureSelected(target):
    """Return a doStepIf predicate that is True unless the force-build
    "options" property selects a single architecture other than `target`."""
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
        except KeyError:
            return True

        # anything that is not exactly a dict means no restriction
        if type(options) is not dict:
            return True

        selected_arch = options.get("architecture", "all")
        return selected_arch in ("all", target)

    return CheckArchitectureProperty
323
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    """Derive the usign public key file contents from a base64 secret key.

    The public portion consists of the 2-byte algorithm tag, the 8-byte key
    id (bytes 32..40) and the key material from byte 72 onward of the decoded
    secret blob. Returns the two-line .pub text, or None when the input is
    not valid base64.
    """
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        # narrow scope to decoding errors only (was a bare except)
        return None

    # BUGFIX: b64encode() returns bytes in Python 3; decode it so the
    # resulting .pub file does not contain a literal "b'...'" repr
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode("ascii"))
332
def IsSharedWorkdir(step):
    """doStepIf helper: True when the worker runs with a shared work directory."""
    return True if step.getProperty("shared_wd") else False
335
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True which is unfortunately marking all
    previous requests as complete when new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    # newest completed, non-skipped build request for this builder
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    # also consult the most recently started build: it may have completed
    # later than the newest request found above
    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        # prefer the later of the two timestamps (complete_at may be None
        # for a build still in progress)
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at
373
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns sorted list of builders by their last timestamp of completed and
    not skipped build.

    Builders that never completed a build sort first; builders currently
    building sort last.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        # never-built builders get the earliest possible timestamp
        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        # busy builders get the latest possible timestamp
        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)

    # FIX: renamed from "results", which shadowed the buildbot.process.results
    # module imported at the top of this file
    infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    infos.sort(key=bldr_sort)

    for r in infos:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in infos]
410
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

# per-worker lock used to serialize downloads
dlLock = locks.WorkerLock("worker_dl")

workerNames = [worker.workername for worker in c['workers']]

# the force builder only triggers the per-arch builders
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))
427
# Assemble one builder per package architecture: each downloads the matching
# SDK, builds all feed packages with it and uploads the results.
for arch in arches:
    # arch[0] is the architecture name; arch[1] is a "target/subtarget" pair
    # (split into ts) used below to locate a matching SDK on the server
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))

    # find number of cores
    factory.addStep(SetProperty(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # fetch and unpack the prebuilt SDK for this architecture
    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    # drop absolute symlinks outside /bin and /usr/bin left over from the
    # SDK build host
    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    # determine the release version advertised by the SDK's version.mk
    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))

    factory.addStep(SetProperty(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        # private key and ucert are placeholders: signing happens on the master
        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "patchfeedsconfgitfull",
        description = "Patching feeds.conf to use src-git-full",
        workdir = "build/sdk",
        command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf to use SSH cloning",
            workdir = "build/sdk",
            command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    if git_ssh:
        # don't leave the SSH-rewritten feeds.conf (or key reference) around
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    # build all packages from all feeds with the SDK
    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    # sign the generated Packages indexes: pack them up, ship them to the
    # master, run signall.sh there, and bring the signatures back
    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))

    # upload built packages and failure logs to the download server
    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    # extract the per-package failure logs referenced in error.txt
    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    # optionally mirror the newly downloaded source archives
    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
                WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))

    # report disk usage even when earlier steps failed
    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    # register the builder plus a triggerable scheduler so the force builder
    # can start individual architectures on demand
    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))
805
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if ini.has_option("phase2", "status_bind"):
    www = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }
    c['www'] = www

    # optional basic auth; the configured user becomes the only admin
    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        status_user = ini.get("phase2", "status_user")
        www['auth'] = util.UserPasswordAuth([
            (status_user, ini.get("phase2", "status_password"))
        ])
        www['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[status_user]) ]
        )
830
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
c['db'] = dict(db_url="sqlite:///state.sqlite")

# opt out of reporting usage data upstream
c['buildbotNetUsageData'] = None