phase1,phase2: fix reconfig command by creating twistd.pid
[buildbot.git] / phase2 / master.cfg
1 # -*- python -*-
2 # ex: set syntax=python:
3
4 import os
5 import re
6 import base64
7 import random
8 import subprocess
9 import configparser
10
11 from dateutil.tz import tzutc
12 from datetime import datetime, timedelta
13
14 from twisted.internet import defer
15 from twisted.python import log
16
17 from buildbot import locks
18 from buildbot.data import resultspec
19 from buildbot.changes import filter
20 from buildbot.changes.gitpoller import GitPoller
21 from buildbot.config import BuilderConfig
22 from buildbot.plugins import schedulers
23 from buildbot.plugins import steps
24 from buildbot.plugins import util
25 from buildbot.process import results
26 from buildbot.process.factory import BuildFactory
27 from buildbot.process.properties import Property
28 from buildbot.process.properties import WithProperties
29 from buildbot.schedulers.basic import SingleBranchScheduler
30 from buildbot.schedulers.forcesched import ForceScheduler
31 from buildbot.steps.master import MasterShellCommand
32 from buildbot.steps.shell import SetProperty
33 from buildbot.steps.shell import ShellCommand
34 from buildbot.steps.transfer import FileDownload
35 from buildbot.steps.transfer import FileUpload
36 from buildbot.steps.transfer import StringDownload
37 from buildbot.worker import Worker
38
39
# "buildbot reconfig" signals the master via the PID stored in twistd.pid;
# when the master was started in a way that did not create the file (e.g.
# inside a container), write our own PID. Open with mode "x" so creation is
# atomic: this closes the exists()/open() race and guarantees we never
# clobber a pid file that twistd itself just wrote.
try:
	with open("twistd.pid", "x") as pidfile:
		pidfile.write(str(os.getpid()))
except FileExistsError:
	# twistd (or a previous run) already wrote it -- leave it alone
	pass
43
# Load the shared builder configuration; defaults to ./config.ini next to
# the master, overridable via the BUILDMASTER_CONFIG environment variable.
ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))

# externally visible URL of this buildmaster (mandatory setting)
buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
55
####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

# Defaults for the optional config.ini settings read below.
worker_port = 9990	# TCP port the workers connect to
persistent = False	# keep build trees between builds
other_builds = 0	# other builds sharing each worker's CPUs (see GetNumJobs)
tree_expire = 0		# build tree maximum age in seconds (0 = never expire)
git_ssh = False		# rewrite feed clone URLs from HTTPS to SSH
git_ssh_key = None	# private key material used when git_ssh is enabled

# NOTE(review): the port stays a string when read from the ini; confirm
# buildbot accepts a numeric string here (it is passed as a port spec).
if ini.has_option("phase2", "port"):
	worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
	persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "other_builds"):
	other_builds = ini.getint("phase2", "other_builds")

if ini.has_option("phase2", "expire"):
	tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
	git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
	git_ssh_key = ini.get("general", "git_ssh_key")
else:
	# SSH cloning is unusable without a key
	git_ssh = False
88
c['workers'] = []
max_builds = dict()

# Register every "worker <name>" section of config.ini that belongs to
# phase 2. Each worker carries two pieces of state:
#   max_builds[name] - parallel build slots on that worker
#   shared_wd        - whether all builders share one work directory
for section in ini.sections():
	if section.startswith("worker "):
		if ini.has_option(section, "name") and ini.has_option(section, "password") and \
		   ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
			name = ini.get(section, "name")
			password = ini.get(section, "password")
			sl_props = { 'shared_wd': False }
			max_builds[name] = 1

			if ini.has_option(section, "builds"):
				max_builds[name] = ini.getint(section, "builds")

			# a single-slot worker can always share its work directory
			if max_builds[name] == 1:
				sl_props['shared_wd'] = True

			if ini.has_option(section, "shared_wd"):
				sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
				# Bug fix: compare this worker's slot count, not the whole
				# dict. The old "max_builds != 1" was always True (a dict
				# never equals 1), so any explicit shared_wd=true raised
				# even when builds was 1.
				if sl_props['shared_wd'] and (max_builds[name] != 1):
					raise ValueError('max_builds must be 1 with shared workdir!')

			c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
113
# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds: piled-up requests for a builder are collapsed into one
c['collapseRequests'] = True

# Reduce amount of backlog data: a nightly janitor run (at 06:00) deletes
# build logs older than three days
c['configurators'] = [util.JanitorConfigurator(
	logHorizon=timedelta(days=3),
	hour=6,
)]
127
####### CHANGESOURCES

# master-side working directory (source checkout, signing staging area)
work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

# rsync module used to publish packages and failure logs (mandatory)
rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

# optional rsync module for mirroring downloaded source archives
rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
	rsync_src_url = ini.get("rsync", "source_url")
	rsync_src_key = ini.get("rsync", "source_password")

# where the prebuilt SDK archive is fetched from, and its file pattern
rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
	rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
	rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
	rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

# source repository used to enumerate architectures and feeds
repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
	repo_branch = ini.get("repo", "branch")

# usign key for signing package indexes; the comment defaults to one
# derived from the branch name unless overridden in [usign]
usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
	usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
	usign_comment = ini.get("usign", "comment")
171
# Discover the package architectures supported by the source tree: clone
# (or refresh) a shallow checkout, then let dump-target-info.pl emit one
# "<archname> <target/subtarget> ..." line per architecture.
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
	subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
	subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
	stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

# iterate the pipe directly instead of a manual readline()/break loop;
# the pipe yields bytes, so decode each line before splitting
for line in findarches.stdout:
	at = line.decode().strip().split()
	arches.append(at)
	archnames.append(at[0])

# reap the helper so it does not linger as a zombie process
findarches.wait()
192
193
# find feeds
feeds = []              # parsed src-git entries from the feed configuration
feedbranches = dict()   # feed repository URL -> branch we watch for changes

# one GitPoller per feed is appended here by parse_feed_entry() below
c['change_source'] = []
199
def parse_feed_entry(line):
	"""Register one feed-configuration line.

	Accepts both str and bytes: the "val.BASE_FEED" make invocation below
	hands over raw bytes from its stdout pipe (under Python 3 the old code
	compared b"src-git" against "src-git" and silently never matched, so
	the base feed was never registered), while feeds.conf.default is read
	in text mode. Only "src-git" entries are tracked: the entry is stored
	in feeds, its branch recorded in feedbranches, and a GitPoller change
	source added for it.
	"""
	if isinstance(line, bytes):
		line = line.decode()
	parts = line.strip().split()
	if not parts:
		# tolerate blank lines instead of raising IndexError
		return
	if parts[0] == "src-git":
		feeds.append(parts)
		# a feed URL may carry its branch as "<url>;<branch>"
		url = parts[2].strip().split(';')
		branch = url[1] if len(url) > 1 else 'master'
		feedbranches[url[0]] = branch
		c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
208
# Ask the SDK makefile for the base feed definition (TOPDIR pinned to the
# fresh checkout), register it, then register every entry from
# feeds.conf.default.
make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
	env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
	# the stdout pipe yields bytes -- decode before parsing
	parse_feed_entry(line.decode())
# reap the make process so it does not linger as a zombie
make.wait()

with open(work_dir+'/source.git/feeds.conf.default', 'r') as f:
	for line in f:
		parse_feed_entry(line)
219
220
####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, just kick off a 'basebuild' build

c['schedulers'] = []

# rebuild every architecture when a watched feed branch changes; the
# filter drops commits on branches other than the one we poll per feed
c['schedulers'].append(SingleBranchScheduler(
	name = "all",
	change_filter = filter.ChangeFilter(
		filter_fn = lambda change: change.branch == feedbranches[change.repository]
	),
	treeStableTimer = 60,
	builderNames = archnames))

# manual trigger: runs the 00_force_build builder, whose Trigger steps
# (added in the per-arch loop below) fan out to the real builders
c['schedulers'].append(ForceScheduler(
	name = "force",
	buttonName = "Force builds",
	label = "Force build details",
	builderNames = [ "00_force_build" ],

	# repository fields are fixed/empty -- a forced build is not tied
	# to any particular commit
	codebases = [
		util.CodebaseParameter(
			"",
			label = "Repository",
			branch = util.FixedParameter(name = "branch", default = ""),
			revision = util.FixedParameter(name = "revision", default = ""),
			repository = util.FixedParameter(name = "repository", default = ""),
			project = util.FixedParameter(name = "project", default = "")
		)
	],

	reason = util.StringParameter(
		name = "reason",
		label = "Reason",
		default = "Trigger build",
		required = True,
		size = 80
	),

	# "architecture" ends up in the "options" property, consumed by
	# IsArchitectureSelected() to skip non-matching Trigger steps
	properties = [
		util.NestedParameter(
			name="options",
			label="Build Options",
			layout="vertical",
			fields=[
				util.ChoiceStringParameter(
					name = "architecture",
					label = "Build architecture",
					default = "all",
					choices = [ "all" ] + archnames
				)
			]
		)
	]
))
276
277 ####### BUILDERS
278
279 # The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
280 # what steps, and which workers can execute them. Note that any particular build will
281 # only take place on one worker.
282
def GetDirectorySuffix(props):
	"""Return "-<major>.<minor>" for release builds, "" otherwise.

	The release_version property must look like "21.02.3", "21.02.0-rc1"
	or "21.02-SNAPSHOT"; anything else (or no property) yields no suffix.
	"""
	pattern = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
	if not props.hasProperty("release_version"):
		return ""
	match = pattern.match(props["release_version"])
	if match is None:
		return ""
	return "-%02d.%02d" %(int(match.group(1)), int(match.group(2)))
290
def GetNumJobs(props):
	"""Return the make -j level for a build on this worker.

	Splits the worker's CPU count between its own build slots plus any
	other builds configured to run beside them, with one extra job.
	Falls back to 1 while the worker properties are not yet known.
	Uses integer division: under Python 3 the old "/" produced a float.
	"""
	if props.hasProperty("workername") and props.hasProperty("nproc"):
		return (int(props["nproc"]) // (max_builds[props["workername"]] + other_builds)) + 1
	else:
		return 1
296
def GetCwd(props):
	"""Best-effort worker-side working directory for this build.

	Prefers the build directory, then the generic work directory,
	falling back to "/" when neither property is set.
	"""
	for key in ("builddir", "workdir"):
		if props.hasProperty(key):
			return props[key]
	return "/"
304
def IsArchitectureSelected(target):
	"""Build a doStepIf callback gating a step on the forced-build choice.

	The returned callback is True when the force scheduler selected this
	architecture (or "all"). Builds without a usable "options" property
	-- scheduled builds, or a non-dict value -- always pass.
	"""
	def check(step):
		try:
			options = step.getProperty("options")
		except KeyError:
			return True
		if isinstance(options, dict):
			chosen = options.get("architecture", "all")
			return chosen in ("all", target)
		return True

	return check
319
def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
	"""Derive the usign public-key text from a base64 secret key.

	The public key consists of bytes 0-2, 32-40 and 72.. of the decoded
	secret key, re-encoded as base64, below the comment line with a
	trailing "secret key" rewritten to "public key". Returns None when
	the input is not valid base64.
	"""
	try:
		seckey = base64.b64decode(seckey)
	except Exception:
		return None

	# b64encode returns bytes under Python 3 -- decode it, otherwise the
	# generated key file would contain the literal "b'...'" repr
	return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
		base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
328
def IsSharedWorkdir(step):
	"""doStepIf helper: True when this worker uses a shared work directory."""
	shared = step.getProperty("shared_wd")
	return True if shared else False
331
@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
	"""Returns the complete_at of the latest completed and not SKIPPED
	build request for this builder, or None if there are no such build
	requests. We need to filter out SKIPPED requests because we're
	using collapseRequests=True which is unfortunately marking all
	previous requests as complete when new buildset is created.

	@returns: datetime instance or None, via Deferred
	"""

	bldrid = yield bldr.getBuilderId()
	# ask the data API for the single newest completed, non-skipped
	# request (sorted by completion time, descending)
	completed = yield bldr.master.data.get(
		('builders', bldrid, 'buildrequests'),
		[
			resultspec.Filter('complete', 'eq', [True]),
			resultspec.Filter('results', 'ne', [results.SKIPPED]),
		],
		order=['-complete_at'], limit=1)
	if not completed:
		return

	return completed[0]['complete_at']
355
@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
	"""Returns sorted list of builders by their last timestamp of completed and
	not skipped build.

	@returns: list of sorted builders
	"""

	def is_building(bldr):
		# both running and gracefully-shutting-down builds count
		return bool(bldr.building) or bool(bldr.old_building)

	def bldr_info(bldr):
		# pair each builder with its newest completion timestamp
		d = defer.maybeDeferred(getNewestCompleteTime, bldr)
		d.addCallback(lambda complete_at: (complete_at, bldr))
		return d

	def bldr_sort(item):
		(complete_at, bldr) = item

		# never-built builders sort first (oldest possible timestamp)
		if not complete_at:
			date = datetime.min
			complete_at = date.replace(tzinfo=tzutc())

		# busy builders sort last so idle ones get work first
		if is_building(bldr):
			date = datetime.max
			complete_at = date.replace(tzinfo=tzutc())

		return (complete_at, bldr.name)

	# renamed from "results": the old local shadowed the
	# buildbot.process.results module imported at the top of the file
	infos = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
	infos.sort(key=bldr_sort)

	for complete_at, bldr in infos:
		log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(bldr.name, complete_at))

	return [bldr for complete_at, bldr in infos]
392
c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

# worker-scoped download lock
# NOTE(review): dlLock is not referenced anywhere in this file chunk --
# confirm it is still needed or used elsewhere.
dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
	workerNames.append(worker.workername)

# the force scheduler's builder; its factory is populated with one
# Trigger step per architecture in the loop further down
force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
	name = "00_force_build",
	workernames = workerNames,
	factory = force_factory))
409
# One builder per package architecture. Each build: set up the (optionally
# shared) work directory, fetch and unpack the current SDK, update and
# install the feeds, build all packages, have the master sign the package
# indexes, then rsync packages, failure logs and (optionally) source
# archives to the mirrors.
for arch in arches:
	# arch is [archname, "target/subtarget", ...]; ts selects the SDK path
	ts = arch[1].split('/')

	factory = BuildFactory()

	# setup shared work directory if required
	factory.addStep(ShellCommand(
		name = "sharedwd",
		description = "Setting up shared work directory",
		command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
		workdir = ".",
		haltOnFailure = True,
		doStepIf = IsSharedWorkdir))

	# find number of cores
	factory.addStep(SetProperty(
		name = "nproc",
		property = "nproc",
		description = "Finding number of CPUs",
		command = ["nproc"]))

	# prepare workspace
	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/cleanup.sh',
		workerdest = "../cleanup.sh",
		mode = 0o755))

	if not persistent:
		factory.addStep(ShellCommand(
			name = "cleanold",
			description = "Cleaning previous builds",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "full"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

		factory.addStep(ShellCommand(
			name = "cleanup",
			description = "Cleaning work area",
			command = ["./cleanup.sh", buildbot_url, WithProperties("%(workername)s"), WithProperties("%(buildername)s"), "single"],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	# expire tree if needed
	elif tree_expire > 0:
		factory.addStep(FileDownload(
			mastersrc = scripts_dir + '/expire.sh',
			workerdest = "../expire.sh",
			mode = 0o755))

		factory.addStep(ShellCommand(
			name = "expire",
			description = "Checking for build tree expiry",
			command = ["./expire.sh", str(tree_expire)],
			workdir = ".",
			haltOnFailure = True,
			timeout = 2400))

	factory.addStep(ShellCommand(
		name = "mksdkdir",
		description = "Preparing SDK directory",
		command = ["mkdir", "-p", "sdk"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "downloadsdk",
		description = "Downloading SDK archive",
		command = ["rsync", "-4", "-va", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
		env={'RSYNC_PASSWORD': rsync_sdk_key},
		haltOnFailure = True,
		logEnviron = False))

	factory.addStep(ShellCommand(
		name = "unpacksdk",
		description = "Unpacking SDK archive",
		command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
		haltOnFailure = True))

	# sync the unpacked SDK over the (possibly persistent) sdk/ tree
	factory.addStep(ShellCommand(
		name = "updatesdk",
		description = "Updating SDK",
		command = "rsync --checksum -av sdk_update/ sdk/ && rm -rf sdk_update",
		haltOnFailure = True))

	# drop absolute host-tool symlinks outside /bin and /usr/bin -- they
	# point at paths valid only on the machine that built the SDK
	factory.addStep(ShellCommand(
		name = "cleancmdlinks",
		description = "Sanitizing host command symlinks",
		command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
		haltOnFailure = True))

	# helper makefile that prints the SDK's VERSION_NUMBER
	factory.addStep(StringDownload(
		name = "writeversionmk",
		s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
		workerdest = "sdk/getversion.mk",
		mode = 0o755))

	factory.addStep(SetProperty(
		name = "getversion",
		property = "release_version",
		description = "Finding SDK release version",
		workdir = "build/sdk",
		command = ["make", "-f", "getversion.mk"]))

	# install build key
	if usign_key is not None:
		factory.addStep(StringDownload(
			name = "dlkeybuildpub",
			s = UsignSec2Pub(usign_key, usign_comment),
			workerdest = "sdk/key-build.pub",
			mode = 0o600))

		# real signing happens on the master (signall.sh below); the
		# worker only gets placeholder files
		factory.addStep(StringDownload(
			name = "dlkeybuild",
			s = "# fake private key",
			workerdest = "sdk/key-build",
			mode = 0o600))

		factory.addStep(StringDownload(
			name = "dlkeybuilducert",
			s = "# fake certificate",
			workerdest = "sdk/key-build.ucert",
			mode = 0o600))

	# share one download cache across builds via $HOME/dl
	factory.addStep(ShellCommand(
		name = "mkdldir",
		description = "Preparing download directory",
		command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "mkconf",
		description = "Preparing SDK configuration",
		workdir = "build/sdk",
		command = ["sh", "-c", "rm -f .config && make defconfig"]))

	factory.addStep(FileDownload(
		mastersrc = scripts_dir + '/ccache.sh',
		workerdest = 'sdk/ccache.sh',
		mode = 0o755))

	factory.addStep(ShellCommand(
		name = "prepccache",
		description = "Preparing ccache",
		workdir = "build/sdk",
		command = ["./ccache.sh"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "patchfeedsconfgitfull",
		description = "Patching feeds.conf to use src-git-full",
		workdir = "build/sdk",
		command = "sed -e 's#^src-git #src-git-full #g' feeds.conf.default > feeds.conf",
		haltOnFailure = True))

	if git_ssh:
		factory.addStep(StringDownload(
			name = "dlgitclonekey",
			s = git_ssh_key,
			workerdest = "../git-clone.key",
			mode = 0o600))

		factory.addStep(ShellCommand(
			name = "patchfeedsconf",
			description = "Patching feeds.conf to use SSH cloning",
			workdir = "build/sdk",
			command = "sed -i -e 's#https://#ssh://git@#g' feeds.conf",
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "updatefeeds",
		description = "Updating feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "update", "-f"],
		env = {'GIT_SSH_COMMAND': WithProperties("ssh -o IdentitiesOnly=yes -o IdentityFile=%(cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
		haltOnFailure = True))

	if git_ssh:
		# drop the SSH-patched feeds.conf again once the update is done
		factory.addStep(ShellCommand(
			name = "rmfeedsconf",
			description = "Removing feeds.conf",
			workdir = "build/sdk",
			command=["rm", "feeds.conf"],
			haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "installfeeds",
		description = "Installing feeds",
		workdir = "build/sdk",
		command = ["./scripts/feeds", "install", "-a"],
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "logclear",
		description = "Clearing failure logs",
		workdir = "build/sdk",
		command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "compile",
		description = "Building packages",
		workdir = "build/sdk",
		timeout = 3600,
		command = ["make", WithProperties("-j%(jobs)d", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
		env = {'CCACHE_BASEDIR': WithProperties("%(cwd)s", cwd=GetCwd)},
		haltOnFailure = True))

	factory.addStep(ShellCommand(
		name = "mkfeedsconf",
		description = "Generating pinned feeds.conf",
		workdir = "build/sdk",
		command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

	# round-trip the generated Packages indexes through a tarball so the
	# master can sign them with signall.sh
	if ini.has_option("gpg", "key") or usign_key is not None:
		factory.addStep(MasterShellCommand(
			name = "signprepare",
			description = "Preparing temporary signing directory",
			command = ["mkdir", "-p", "%s/signing" %(work_dir)],
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signpack",
			description = "Packing files to sign",
			workdir = "build/sdk",
			command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
			haltOnFailure = True
		))

		factory.addStep(FileUpload(
			workersrc = "sdk/sign.tar.gz",
			masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			haltOnFailure = True
		))

		factory.addStep(MasterShellCommand(
			name = "signfiles",
			description = "Signing files",
			command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
			env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
			haltOnFailure = True
		))

		factory.addStep(FileDownload(
			mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
			workerdest = "sdk/sign.tar.gz",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "signunpack",
			description = "Unpacking signed files",
			workdir = "build/sdk",
			command = ["tar", "-xzf", "sign.tar.gz"],
			haltOnFailure = True
		))

	# create the remote per-arch directory, then upload the packages
	factory.addStep(ShellCommand(
		name = "uploadprepare",
		description = "Preparing package directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/packages%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "packageupload",
		description = "Uploading package files",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--checksum", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "bin/packages/%s/" %(arch[0]), WithProperties("%s/packages%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	# same two-phase dance for the failure logs
	factory.addStep(ShellCommand(
		name = "logprepare",
		description = "Preparing log directory",
		workdir = "build/sdk",
		command = ["rsync", "-4", "-av", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", WithProperties("%s/faillogs%%(suffix)s/" %(rsync_bin_url), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = True,
		logEnviron = False
	))

	factory.addStep(ShellCommand(
		name = "logfind",
		description = "Finding failure logs",
		workdir = "build/sdk/logs/package/feeds",
		command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logcollect",
		description = "Collecting failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-av", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
		haltOnFailure = False
	))

	factory.addStep(ShellCommand(
		name = "logupload",
		description = "Uploading failure logs",
		workdir = "build/sdk",
		command = ["rsync", "-4", "--progress", "--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-avz", "faillogs/", WithProperties("%s/faillogs%%(suffix)s/%s/" %(rsync_bin_url, arch[0]), suffix=GetDirectorySuffix)],
		env={'RSYNC_PASSWORD': rsync_bin_key},
		haltOnFailure = False,
		logEnviron = False
	))

	if rsync_src_url is not None:
		# upload source archives fetched during this build (everything
		# in dl/ newer than the SDK archive) to the sources mirror
		factory.addStep(ShellCommand(
			name = "sourcelist",
			description = "Finding source archives to upload",
			workdir = "build/sdk",
			command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
			haltOnFailure = True
		))

		factory.addStep(ShellCommand(
			name = "sourceupload",
			description = "Uploading source archives",
			workdir = "build/sdk",
			command = ["rsync", "--files-from=sourcelist", "-4", "--progress", "--checksum", "--delay-updates",
				WithProperties("--partial-dir=.~tmp~%s~%%(workername)s" %(arch[0])), "-avz", "dl/", "%s/" %(rsync_src_url)],
			env={'RSYNC_PASSWORD': rsync_src_key},
			haltOnFailure = False,
			logEnviron = False
		))

	factory.addStep(ShellCommand(
		name = "df",
		description = "Reporting disk usage",
		command=["df", "-h", "."],
		env={'LC_ALL': 'C'},
		haltOnFailure = False,
		alwaysRun = True
	))

	c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

	# a Triggerable scheduler per arch, plus a Trigger step in the force
	# builder so "Force builds" can fan out to one or all architectures
	c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
	force_factory.addStep(steps.Trigger(
		name = "trigger_%s" % arch[0],
		description = "Triggering %s build" % arch[0],
		schedulerNames = [ "trigger_%s" % arch[0] ],
		set_properties = { "reason": Property("reason") },
		doStepIf = IsArchitectureSelected(arch[0])
	))
764
####### STATUS arches

# 'status' is a list of Status arches. The results of each build will be
# pushed to these arches. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

# web UI is only enabled when a bind address/port is configured
if ini.has_option("phase2", "status_bind"):
	c['www'] = {
		'port': ini.get("phase2", "status_bind"),
		'plugins': {
			'waterfall_view': True,
			'console_view': True,
			'grid_view': True
		}
	}

	# optional single admin account; only that user may hit control
	# endpoints (force/stop/etc.), everyone else is read-only
	if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
		c['www']['auth'] = util.UserPasswordAuth([
			(ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
		])
		c['www']['authz'] = util.Authz(
			allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
			roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
		)
789
####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
	# This specifies what database buildbot uses to store its state. You can leave
	# this at its default for all but the largest installations.
	'db_url' : "sqlite:///state.sqlite",
}

# opt out of sending usage statistics to buildbot.net
c['buildbotNetUsageData'] = None