phase2: rsync: use --size-only instead of --checksum for sourceupload
phase2/master.cfg
# -*- python -*-
# ex: set syntax=python:

import os
import re
import sys
import base64
import subprocess
import configparser

from dateutil.tz import tzutc
from datetime import datetime, timedelta

from twisted.internet import defer
from twisted.python import log

from buildbot import locks
from buildbot.data import resultspec
from buildbot.changes import filter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.plugins import schedulers
from buildbot.plugins import steps
from buildbot.plugins import util
from buildbot.process import results
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Property
from buildbot.process.properties import Interpolate
from buildbot.process import properties
from buildbot.schedulers.basic import SingleBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.shell import SetPropertyFromCommand
from buildbot.steps.shell import ShellCommand
from buildbot.steps.transfer import FileDownload
from buildbot.steps.transfer import FileUpload
from buildbot.steps.transfer import StringDownload
from buildbot.worker import Worker


if not os.path.exists("twistd.pid"):
    with open("twistd.pid", "w") as pidfile:
        pidfile.write("{}".format(os.getpid()))

ini = configparser.ConfigParser()
ini.read(os.getenv("BUILDMASTER_CONFIG", "./config.ini"))
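
# For orientation, a sketch of the config.ini shape this file reads. The section
# and option names are taken from the ini.get()/has_option() calls below; the
# values are purely illustrative:
#
#   [general]
#   workdir = /buildbot
#   title = OpenWrt Packages
#   title_url = http://example.org/
#
#   [phase2]
#   buildbot_url = http://example.org/buildbot/
#   port = 9990
#   persistent = no
#   expire = 1209600
#   status_bind = 8011
#
#   [repo]
#   url = https://git.openwrt.org/openwrt/openwrt.git
#   branch = master
#
#   [rsync]
#   binary_url = user@example.org::bin
#   binary_password = secret
#   sdk_url = user@example.org::sdk
#   sdk_password = secret
#
#   [worker 1]
#   phase = 2
#   name = worker-01
#   password = secret
#   builds = 2
#
# Further optional keys used below: general/git_ssh, general/git_ssh_key,
# rsync/source_url, rsync/source_password, rsync/sdk_pattern, usign/key,
# usign/comment, gpg/key, phase2/status_user, phase2/status_password.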

buildbot_url = ini.get("phase2", "buildbot_url")

# This is a sample buildmaster config file. It must be installed as
# 'master.cfg' in your buildmaster's base directory.

# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}

####### BUILDWORKERS

# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.

worker_port = 9990
persistent = False
tree_expire = 0
git_ssh = False
git_ssh_key = None

if ini.has_option("phase2", "port"):
    worker_port = ini.get("phase2", "port")

if ini.has_option("phase2", "persistent"):
    persistent = ini.getboolean("phase2", "persistent")

if ini.has_option("phase2", "expire"):
    tree_expire = ini.getint("phase2", "expire")

if ini.has_option("general", "git_ssh"):
    git_ssh = ini.getboolean("general", "git_ssh")

if ini.has_option("general", "git_ssh_key"):
    git_ssh_key = ini.get("general", "git_ssh_key")
else:
    git_ssh = False

c['workers'] = []
max_builds = dict()

for section in ini.sections():
    if section.startswith("worker "):
        if ini.has_option(section, "name") and ini.has_option(section, "password") and \
           ini.has_option(section, "phase") and ini.getint(section, "phase") == 2:
            name = ini.get(section, "name")
            password = ini.get(section, "password")
            sl_props = { 'shared_wd': False }
            max_builds[name] = 1

            if ini.has_option(section, "builds"):
                max_builds[name] = ini.getint(section, "builds")

            if max_builds[name] == 1:
                sl_props['shared_wd'] = True

            if ini.has_option(section, "shared_wd"):
                sl_props['shared_wd'] = ini.getboolean(section, "shared_wd")
                if sl_props['shared_wd'] and (max_builds[name] != 1):
                    raise ValueError('max_builds must be 1 with shared workdir!')

            c['workers'].append(Worker(name, password, max_builds = max_builds[name], properties = sl_props))
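
# Note: 'shared_wd' makes all builders on a worker share one work directory (see
# the "sharedwd" step below), which is only safe when at most one build runs at
# a time -- hence the ValueError guard above.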

# 'workerPortnum' defines the TCP port to listen on for connections from workers.
# This must match the value configured into the buildworkers (with their
# --master option)
c['protocols'] = {'pb': {'port': worker_port}}

# coalesce builds
c['collapseRequests'] = True

# Reduce amount of backlog data
c['configurators'] = [util.JanitorConfigurator(
    logHorizon=timedelta(days=3),
    hour=6,
)]
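# (JanitorConfigurator deletes build logs older than logHorizon once a day at
# the given hour, keeping the state database from growing without bound.)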

####### CHANGESOURCES

work_dir = os.path.abspath(ini.get("general", "workdir") or ".")
scripts_dir = os.path.abspath("../scripts")

rsync_bin_url = ini.get("rsync", "binary_url")
rsync_bin_key = ini.get("rsync", "binary_password")

rsync_src_url = None
rsync_src_key = None

if ini.has_option("rsync", "source_url"):
    rsync_src_url = ini.get("rsync", "source_url")
    rsync_src_key = ini.get("rsync", "source_password")

rsync_sdk_url = None
rsync_sdk_key = None
rsync_sdk_pat = "openwrt-sdk-*.tar.xz"

if ini.has_option("rsync", "sdk_url"):
    rsync_sdk_url = ini.get("rsync", "sdk_url")

if ini.has_option("rsync", "sdk_password"):
    rsync_sdk_key = ini.get("rsync", "sdk_password")

if ini.has_option("rsync", "sdk_pattern"):
    rsync_sdk_pat = ini.get("rsync", "sdk_pattern")

rsync_defopts = ["-4", "-v", "--timeout=120"]

repo_url = ini.get("repo", "url")
repo_branch = "master"

if ini.has_option("repo", "branch"):
    repo_branch = ini.get("repo", "branch")

usign_key = None
usign_comment = "untrusted comment: " + repo_branch.replace("-", " ").title() + " key"

if ini.has_option("usign", "key"):
    usign_key = ini.get("usign", "key")

if ini.has_option("usign", "comment"):
    usign_comment = ini.get("usign", "comment")


# find arches
arches = [ ]
archnames = [ ]

if not os.path.isdir(work_dir+'/source.git'):
    subprocess.call(["git", "clone", "--depth=1", "--branch="+repo_branch, repo_url, work_dir+'/source.git'])
else:
    subprocess.call(["git", "pull"], cwd = work_dir+'/source.git')

os.makedirs(work_dir+'/source.git/tmp', exist_ok=True)
findarches = subprocess.Popen(['./scripts/dump-target-info.pl', 'architectures'],
    stdout = subprocess.PIPE, cwd = work_dir+'/source.git')

while True:
    line = findarches.stdout.readline()
    if not line:
        break
    at = line.decode().strip().split()
    arches.append(at)
    archnames.append(at[0])
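
# Each line emitted by dump-target-info.pl is expected to look like
# "<packagearch> <target>/<subtarget> ..." (e.g. "aarch64_cortex-a53
# mvebu/cortex-a53" -- illustrative values, not taken from this tree).
# at[0] becomes the builder name, and the target/subtarget pair is split
# out later to locate the matching SDK on the rsync server.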


# find feeds
feeds = []
feedbranches = dict()

c['change_source'] = []

def parse_feed_entry(line):
    parts = line.strip().split()
    if parts[0].startswith("src-git"):
        feeds.append(parts)
        url = parts[2].strip().split(';')
        branch = url[1] if len(url) > 1 else 'master'
        feedbranches[url[0]] = branch
        c['change_source'].append(GitPoller(url[0], branch=branch, workdir='%s/%s.git' %(os.getcwd(), parts[1]), pollinterval=300))
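
# A typical feeds.conf.default entry this parses (illustrative):
#   src-git packages https://git.openwrt.org/feed/packages.git;openwrt-21.02
# i.e. parts[1] is the feed name and parts[2] is "<url>[;<branch>]", with the
# branch defaulting to master when no ';<branch>' suffix is present.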

make = subprocess.Popen(['make', '--no-print-directory', '-C', work_dir+'/source.git/target/sdk/', 'val.BASE_FEED'],
    env = dict(os.environ, TOPDIR=work_dir+'/source.git'), stdout = subprocess.PIPE)

line = make.stdout.readline()
if line:
    parse_feed_entry(str(line, 'utf-8'))

with open(work_dir+'/source.git/feeds.conf.default', 'r', encoding='utf-8') as f:
    for line in f:
        parse_feed_entry(line)

if len(c['change_source']) == 0:
    log.err("FATAL ERROR: no change_sources defined, aborting!")
    sys.exit(-1)

####### SCHEDULERS

# Configure the Schedulers, which decide how to react to incoming changes. In this
# case, a change on a watched feed branch kicks off a build on every architecture
# builder.

c['schedulers'] = []
c['schedulers'].append(SingleBranchScheduler(
    name = "all",
    change_filter = filter.ChangeFilter(
        filter_fn = lambda change: change.branch == feedbranches[change.repository]
    ),
    treeStableTimer = 60,
    builderNames = archnames))

c['schedulers'].append(ForceScheduler(
    name = "force",
    buttonName = "Force builds",
    label = "Force build details",
    builderNames = [ "00_force_build" ],

    codebases = [
        util.CodebaseParameter(
            "",
            label = "Repository",
            branch = util.FixedParameter(name = "branch", default = ""),
            revision = util.FixedParameter(name = "revision", default = ""),
            repository = util.FixedParameter(name = "repository", default = ""),
            project = util.FixedParameter(name = "project", default = "")
        )
    ],

    reason = util.StringParameter(
        name = "reason",
        label = "Reason",
        default = "Trigger build",
        required = True,
        size = 80
    ),

    properties = [
        util.NestedParameter(
            name="options",
            label="Build Options",
            layout="vertical",
            fields=[
                util.ChoiceStringParameter(
                    name = "architecture",
                    label = "Build architecture",
                    default = "all",
                    choices = [ "all" ] + archnames
                )
            ]
        )
    ]
))

####### BUILDERS

# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.

@properties.renderer
def GetDirectorySuffix(props):
    verpat = re.compile(r'^([0-9]{2})\.([0-9]{2})(?:\.([0-9]+)(?:-rc([0-9]+))?|-(SNAPSHOT))$')
    if props.hasProperty("release_version"):
        m = verpat.match(props["release_version"])
        if m is not None:
            return "-%02d.%02d" %(int(m.group(1)), int(m.group(2)))
    return ""
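
# e.g. release_version "21.02.3" or "21.02-SNAPSHOT" both yield "-21.02", while
# anything the pattern does not match (master/snapshot SDKs) yields "", so
# uploads land in the unsuffixed packages/ and faillogs/ trees.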

@properties.renderer
def GetNumJobs(props):
    if props.hasProperty("workername") and props.hasProperty("nproc"):
        # Integer division: true division would hand make a fractional
        # value like "-j4.0".
        return str(int(props["nproc"]) // max_builds[props["workername"]])
    else:
        return "1"

@properties.renderer
def GetCwd(props):
    if props.hasProperty("builddir"):
        return props["builddir"]
    elif props.hasProperty("workdir"):
        return props["workdir"]
    else:
        return "/"

def IsArchitectureSelected(target):
    def CheckArchitectureProperty(step):
        try:
            options = step.getProperty("options")
            if type(options) is dict:
                selected_arch = options.get("architecture", "all")
                if selected_arch != "all" and selected_arch != target:
                    return False
        except KeyError:
            pass

        return True

    return CheckArchitectureProperty

def UsignSec2Pub(seckey, comment="untrusted comment: secret key"):
    try:
        seckey = base64.b64decode(seckey)
    except Exception:
        return None

    # .decode() the base64 output; otherwise a literal "b'...'" ends up in
    # the rendered public key file.
    return "{}\n{}".format(re.sub(r"\bsecret key$", "public key", comment),
        base64.b64encode(seckey[0:2] + seckey[32:40] + seckey[72:]).decode())
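
# The slicing above assumes the usign/signify secret-key layout: bytes 0:2 are
# the "Ed" algorithm tag, 32:40 the key number, and 72: the Ed25519 public half
# of the keypair -- the three fields a .pub file carries.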

def IsSharedWorkdir(step):
    return bool(step.getProperty("shared_wd"))

@defer.inlineCallbacks
def getNewestCompleteTime(bldr):
    """Returns the complete_at of the latest completed and not SKIPPED
    build request for this builder, or None if there are no such build
    requests. We need to filter out SKIPPED requests because we're
    using collapseRequests=True, which unfortunately marks all previous
    requests as complete when a new buildset is created.

    @returns: datetime instance or None, via Deferred
    """

    bldrid = yield bldr.getBuilderId()
    completed = yield bldr.master.data.get(
        ('builders', bldrid, 'buildrequests'),
        [
            resultspec.Filter('complete', 'eq', [True]),
            resultspec.Filter('results', 'ne', [results.SKIPPED]),
        ],
        order=['-complete_at'], limit=1)
    if not completed:
        return

    complete_at = completed[0]['complete_at']

    last_build = yield bldr.master.data.get(
        ('builds', ),
        [
            resultspec.Filter('builderid', 'eq', [bldrid]),
        ],
        order=['-started_at'], limit=1)

    if last_build and last_build[0]:
        last_complete_at = last_build[0]['complete_at']
        if last_complete_at and (last_complete_at > complete_at):
            return last_complete_at

    return complete_at

@defer.inlineCallbacks
def prioritizeBuilders(master, builders):
    """Returns the list of builders sorted by the timestamp of their last
    completed (and not skipped) build.

    @returns: list of sorted builders
    """

    def is_building(bldr):
        return bool(bldr.building) or bool(bldr.old_building)

    def bldr_info(bldr):
        d = defer.maybeDeferred(getNewestCompleteTime, bldr)
        d.addCallback(lambda complete_at: (complete_at, bldr))
        return d

    def bldr_sort(item):
        (complete_at, bldr) = item

        if not complete_at:
            date = datetime.min
            complete_at = date.replace(tzinfo=tzutc())

        if is_building(bldr):
            date = datetime.max
            complete_at = date.replace(tzinfo=tzutc())

        return (complete_at, bldr.name)
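
    # Builders that never completed a build sort first (datetime.min) and
    # currently-building ones sort last (datetime.max), so the longest-idle
    # builder is offered a free worker first.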

    results = yield defer.gatherResults([bldr_info(bldr) for bldr in builders])
    results.sort(key=bldr_sort)

    for r in results:
        log.msg("prioritizeBuilders: {:>20} complete_at: {}".format(r[1].name, r[0]))

    return [r[1] for r in results]

c['prioritizeBuilders'] = prioritizeBuilders
c['builders'] = []

dlLock = locks.WorkerLock("worker_dl")

workerNames = [ ]

for worker in c['workers']:
    workerNames.append(worker.workername)

force_factory = BuildFactory()

c['builders'].append(BuilderConfig(
    name = "00_force_build",
    workernames = workerNames,
    factory = force_factory))

for arch in arches:
    ts = arch[1].split('/')

    factory = BuildFactory()

    # setup shared work directory if required
    factory.addStep(ShellCommand(
        name = "sharedwd",
        description = "Setting up shared work directory",
        command = 'test -L "$PWD" || (mkdir -p ../shared-workdir && rm -rf "$PWD" && ln -s shared-workdir "$PWD")',
        workdir = ".",
        haltOnFailure = True,
        doStepIf = IsSharedWorkdir))
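
    # The step above replaces the per-builder work directory with a symlink to
    # ../shared-workdir, so all builders on this worker reuse one SDK tree --
    # which is why shared_wd requires max_builds == 1.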

    # find number of cores
    factory.addStep(SetPropertyFromCommand(
        name = "nproc",
        property = "nproc",
        description = "Finding number of CPUs",
        command = ["nproc"]))

    # prepare workspace
    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/cleanup.sh',
        workerdest = "../cleanup.sh",
        mode = 0o755))

    if not persistent:
        factory.addStep(ShellCommand(
            name = "cleanold",
            description = "Cleaning previous builds",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "full"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

        factory.addStep(ShellCommand(
            name = "cleanup",
            description = "Cleaning work area",
            command = ["./cleanup.sh", buildbot_url, Interpolate("%(prop:workername)s"), Interpolate("%(prop:buildername)s"), "single"],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    # expire tree if needed
    elif tree_expire > 0:
        factory.addStep(FileDownload(
            mastersrc = scripts_dir + '/expire.sh',
            workerdest = "../expire.sh",
            mode = 0o755))

        factory.addStep(ShellCommand(
            name = "expire",
            description = "Checking for build tree expiry",
            command = ["./expire.sh", str(tree_expire)],
            workdir = ".",
            haltOnFailure = True,
            timeout = 2400))

    factory.addStep(ShellCommand(
        name = "mksdkdir",
        description = "Preparing SDK directory",
        command = ["mkdir", "-p", "sdk"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "downloadsdk",
        description = "Downloading SDK archive",
        command = ["rsync"] + rsync_defopts + ["-a", "%s/%s/%s/%s" %(rsync_sdk_url, ts[0], ts[1], rsync_sdk_pat), "sdk.archive"],
        env={'RSYNC_PASSWORD': rsync_sdk_key},
        haltOnFailure = True,
        logEnviron = False))

    factory.addStep(ShellCommand(
        name = "unpacksdk",
        description = "Unpacking SDK archive",
        command = "rm -rf sdk_update && mkdir sdk_update && tar --strip-components=1 -C sdk_update/ -vxf sdk.archive",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatesdk",
        description = "Updating SDK",
        command = "rsync " + " ".join(rsync_defopts) + " --checksum -a sdk_update/ sdk/ && rm -rf sdk_update",
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "cleancmdlinks",
        description = "Sanitizing host command symlinks",
        command = "find sdk/staging_dir/host/bin/ -type l -exec sh -c 'case $(readlink {}) in /bin/*|/usr/bin/*) true;; /*) rm -vf {};; esac' \\;",
        haltOnFailure = True))

    factory.addStep(StringDownload(
        name = "writeversionmk",
        s = 'TOPDIR:=${CURDIR}\n\ninclude $(TOPDIR)/include/version.mk\n\nversion:\n\t@echo $(VERSION_NUMBER)\n',
        workerdest = "sdk/getversion.mk",
        mode = 0o755))
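
    # getversion.mk just echoes $(VERSION_NUMBER) from include/version.mk; the
    # next step stores it as the "release_version" property consumed by
    # GetDirectorySuffix (e.g. "21.02.3" on a release SDK -- illustrative value).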

    factory.addStep(SetPropertyFromCommand(
        name = "getversion",
        property = "release_version",
        description = "Finding SDK release version",
        workdir = "build/sdk",
        command = ["make", "-f", "getversion.mk"]))

    # install build key
    if usign_key is not None:
        factory.addStep(StringDownload(
            name = "dlkeybuildpub",
            s = UsignSec2Pub(usign_key, usign_comment),
            workerdest = "sdk/key-build.pub",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuild",
            s = "# fake private key",
            workerdest = "sdk/key-build",
            mode = 0o600))

        factory.addStep(StringDownload(
            name = "dlkeybuilducert",
            s = "# fake certificate",
            workerdest = "sdk/key-build.ucert",
            mode = 0o600))
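
    # Only the public key needs to be real: actual signing happens on the master
    # (see the signall.sh round trip below, and CONFIG_SIGNED_PACKAGES= in the
    # compile step), so key-build and key-build.ucert are placeholders that
    # merely satisfy the build system's existence checks.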

    factory.addStep(ShellCommand(
        name = "mkdldir",
        description = "Preparing download directory",
        command = ["sh", "-c", "mkdir -p $HOME/dl && rm -rf ./sdk/dl && ln -sf $HOME/dl ./sdk/dl"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkconf",
        description = "Preparing SDK configuration",
        workdir = "build/sdk",
        command = ["sh", "-c", "rm -f .config && make defconfig"]))

    factory.addStep(FileDownload(
        mastersrc = scripts_dir + '/ccache.sh',
        workerdest = 'sdk/ccache.sh',
        mode = 0o755))

    factory.addStep(ShellCommand(
        name = "prepccache",
        description = "Preparing ccache",
        workdir = "build/sdk",
        command = ["./ccache.sh"],
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(StringDownload(
            name = "dlgitclonekey",
            s = git_ssh_key,
            workerdest = "../git-clone.key",
            mode = 0o600))

        factory.addStep(ShellCommand(
            name = "patchfeedsconf",
            description = "Patching feeds.conf",
            workdir = "build/sdk",
            command = "sed -e 's#https://#ssh://git@#g' feeds.conf.default > feeds.conf",
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "updatefeeds",
        description = "Updating feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "update", "-f"],
        env = {'GIT_SSH_COMMAND': Interpolate("ssh -o IdentitiesOnly=yes -o IdentityFile=%(kw:cwd)s/git-clone.key -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no", cwd=GetCwd)} if git_ssh else {},
        haltOnFailure = True))

    if git_ssh:
        factory.addStep(ShellCommand(
            name = "rmfeedsconf",
            description = "Removing feeds.conf",
            workdir = "build/sdk",
            command=["rm", "feeds.conf"],
            haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "installfeeds",
        description = "Installing feeds",
        workdir = "build/sdk",
        command = ["./scripts/feeds", "install", "-a"],
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "logclear",
        description = "Clearing failure logs",
        workdir = "build/sdk",
        command = ["rm", "-rf", "logs/package/error.txt", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "compile",
        description = "Building packages",
        workdir = "build/sdk",
        timeout = 3600,
        command = ["make", Interpolate("-j%(kw:jobs)s", jobs=GetNumJobs), "IGNORE_ERRORS=n m y", "BUILD_LOG=1", "CONFIG_AUTOREMOVE=y", "CONFIG_SIGNED_PACKAGES="],
        env = {'CCACHE_BASEDIR': Interpolate("%(kw:cwd)s", cwd=GetCwd)},
        haltOnFailure = True))

    factory.addStep(ShellCommand(
        name = "mkfeedsconf",
        description = "Generating pinned feeds.conf",
        workdir = "build/sdk",
        command = "./scripts/feeds list -s -f > bin/packages/%s/feeds.conf" %(arch[0])))

    factory.addStep(ShellCommand(
        name = "checksums",
        description = "Calculating checksums",
        descriptionDone="Checksums calculated",
        workdir = "build/sdk",
        command = "cd bin/packages/%s; " %(arch[0]) + "find . -type f -not -name 'sha256sums' -printf \"%P\\n\" | sort | xargs -r ../../../staging_dir/host/bin/mkhash -n sha256 | sed -ne 's!^\\(.*\\) \\(.*\\)$!\\1 *\\2!p' > sha256sums",
        haltOnFailure = True
    ))
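
    # sha256sums ends up in sha256sum's "binary mode" format, one line per file:
    #   <sha256> *<relative/path.ipk>
    # which is what sha2rsync.pl diffs against the remote copy further down.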

    if ini.has_option("gpg", "key") or usign_key is not None:
        factory.addStep(MasterShellCommand(
            name = "signprepare",
            description = "Preparing temporary signing directory",
            command = ["mkdir", "-p", "%s/signing" %(work_dir)],
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signpack",
            description = "Packing files to sign",
            workdir = "build/sdk",
            command = "find bin/packages/%s/ -mindepth 2 -maxdepth 2 -type f -name Packages -print0 | xargs -0 tar -czf sign.tar.gz" %(arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(FileUpload(
            workersrc = "sdk/sign.tar.gz",
            masterdest = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            haltOnFailure = True
        ))

        factory.addStep(MasterShellCommand(
            name = "signfiles",
            description = "Signing files",
            command = ["%s/signall.sh" %(scripts_dir), "%s/signing/%s.tar.gz" %(work_dir, arch[0])],
            env = { 'CONFIG_INI': os.getenv("BUILDMASTER_CONFIG", "./config.ini") },
            haltOnFailure = True
        ))

        factory.addStep(FileDownload(
            mastersrc = "%s/signing/%s.tar.gz" %(work_dir, arch[0]),
            workerdest = "sdk/sign.tar.gz",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "signunpack",
            description = "Unpacking signed files",
            workdir = "build/sdk",
            command = ["tar", "-xzf", "sign.tar.gz"],
            haltOnFailure = True
        ))
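
    # Signing round trip: the worker tars up every per-feed Packages index,
    # ships it to the master, signall.sh signs the archive contents with the
    # gpg and/or usign keys named in config.ini, and the worker unpacks the
    # result so the signatures sit next to the indexes before upload. (The
    # exact signature file names are signall.sh's business, not fixed here.)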

    # download remote sha256sums to 'arch-sha256sums'
    factory.addStep(ShellCommand(
        name = "target-sha256sums",
        description = "Fetching remote sha256sums for arch",
        command = ["rsync"] + rsync_defopts + ["-z", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/sha256sums", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0]), "arch-sha256sums"],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        logEnviron = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
    ))

    factory.addStep(FileDownload(
        name="dlrsync.sh",
        mastersrc = scripts_dir + "/rsync.sh",
        workerdest = "../rsync.sh",
        mode = 0o755
    ))

    factory.addStep(FileDownload(
        name = "dlsha2rsyncpl",
        mastersrc = "sha2rsync.pl",
        workerdest = "../sha2rsync.pl",
        mode = 0o755,
    ))

    factory.addStep(ShellCommand(
        name = "buildlist",
        description = "Building list of files to upload",
        workdir = "build/sdk",
        command = ["../../../sha2rsync.pl", "../../arch-sha256sums", "bin/packages/%s/sha256sums" %(arch[0]), "rsynclist"],
        haltOnFailure = True,
    ))
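
    # Judging by its arguments, sha2rsync.pl compares the remote checksum list
    # against the local one and writes the names of new or changed files to
    # 'rsynclist', so the upload below only transfers what actually differs.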

    factory.addStep(ShellCommand(
        name = "uploadprepare",
        description = "Preparing package directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageupload",
        description = "Uploading package files",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=rsynclist", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "packageprune",
        description = "Pruning package files",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--existing", "--ignore-existing", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-a", "bin/packages/%s/" %(arch[0]), Interpolate("%(kw:rsyncbinurl)s/packages%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))
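
    # Upload happens in three passes: "uploadprepare" creates just the per-arch
    # directory skeleton on the server, "packageupload" transfers only the files
    # listed in rsynclist, and "packageprune" runs with --delete --existing
    # --ignore-existing, a combination that transfers nothing and only removes
    # remote files that no longer exist locally.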

    factory.addStep(ShellCommand(
        name = "logprepare",
        description = "Preparing log directory",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--include", "/%s/" %(arch[0]), "--exclude", "/*", "--exclude", "/%s/*" %(arch[0]), "bin/packages/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix)],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = True,
        logEnviron = False
    ))

    factory.addStep(ShellCommand(
        name = "logfind",
        description = "Finding failure logs",
        workdir = "build/sdk/logs/package/feeds",
        command = ["sh", "-c", "sed -ne 's!^ *ERROR: package/feeds/\\([^ ]*\\) .*$!\\1!p' ../error.txt | sort -u | xargs -r find > ../../../logs.txt"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logcollect",
        description = "Collecting failure logs",
        workdir = "build/sdk",
        command = ["rsync"] + rsync_defopts + ["-a", "--files-from=logs.txt", "logs/package/feeds/", "faillogs/"],
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
    ))

    factory.addStep(ShellCommand(
        name = "logupload",
        description = "Uploading failure logs",
        workdir = "build/sdk",
        command = ["../../../rsync.sh"] + rsync_defopts + ["--delete", "--delay-updates", "--partial-dir=.~tmp~%s" %(arch[0]), "-az", "faillogs/", Interpolate("%(kw:rsyncbinurl)s/faillogs%(kw:suffix)s/%(kw:archname)s/", rsyncbinurl=rsync_bin_url, suffix=GetDirectorySuffix, archname=arch[0])],
        env={'RSYNC_PASSWORD': rsync_bin_key},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = True,
        logEnviron = False
    ))

    if rsync_src_url is not None:
        factory.addStep(ShellCommand(
            name = "sourcelist",
            description = "Finding source archives to upload",
            workdir = "build/sdk",
            command = "find dl/ -maxdepth 1 -type f -not -size 0 -not -name '.*' -not -name '*.hash' -not -name '*.dl' -newer ../sdk.archive -printf '%f\\n' > sourcelist",
            haltOnFailure = True
        ))

        factory.addStep(ShellCommand(
            name = "sourceupload",
            description = "Uploading source archives",
            workdir = "build/sdk",
            command = ["../../../rsync.sh"] + rsync_defopts + ["--files-from=sourcelist", "--size-only", "--delay-updates",
                Interpolate("--partial-dir=.~tmp~%(kw:archname)s~%(prop:workername)s", archname=arch[0]), "-a", "dl/", "%s/" %(rsync_src_url)],
            env={'RSYNC_PASSWORD': rsync_src_key},
            haltOnFailure = False,
            flunkOnFailure = False,
            warnOnFailure = True,
            logEnviron = False
        ))
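
        # --size-only (this commit, replacing --checksum) suffices here: dl/
        # source tarballs are effectively immutable once named, so comparing
        # sizes avoids re-hashing every archive on the server while still
        # uploading anything new.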

    factory.addStep(ShellCommand(
        name = "df",
        description = "Reporting disk usage",
        command=["df", "-h", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "du",
        description = "Reporting estimated file space usage",
        command=["du", "-sh", "."],
        env={'LC_ALL': 'C'},
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True
    ))

    factory.addStep(ShellCommand(
        name = "ccachestat",
        description = "Reporting ccache stats",
        command=["ccache", "-s"],
        want_stderr = False,
        haltOnFailure = False,
        flunkOnFailure = False,
        warnOnFailure = False,
        alwaysRun = True,
    ))

    c['builders'].append(BuilderConfig(name=arch[0], workernames=workerNames, factory=factory))

    c['schedulers'].append(schedulers.Triggerable(name="trigger_%s" % arch[0], builderNames=[ arch[0] ]))
    force_factory.addStep(steps.Trigger(
        name = "trigger_%s" % arch[0],
        description = "Triggering %s build" % arch[0],
        schedulerNames = [ "trigger_%s" % arch[0] ],
        set_properties = { "reason": Property("reason") },
        doStepIf = IsArchitectureSelected(arch[0])
    ))
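
# Force-build flow: the ForceScheduler above targets the single 00_force_build
# builder, whose factory holds one Trigger step per architecture;
# IsArchitectureSelected() then skips every trigger except the one matching the
# "architecture" choice (or skips none when "all" is selected).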

####### STATUS TARGETS

# 'status' is a list of status targets. The results of each build will be
# pushed to these targets. buildbot/status/*.py has a variety to choose from,
# including web pages, email senders, and IRC bots.

if ini.has_option("phase2", "status_bind"):
    c['www'] = {
        'port': ini.get("phase2", "status_bind"),
        'plugins': {
            'waterfall_view': True,
            'console_view': True,
            'grid_view': True
        }
    }

    if ini.has_option("phase2", "status_user") and ini.has_option("phase2", "status_password"):
        c['www']['auth'] = util.UserPasswordAuth([
            (ini.get("phase2", "status_user"), ini.get("phase2", "status_password"))
        ])
        c['www']['authz'] = util.Authz(
            allowRules=[ util.AnyControlEndpointMatcher(role="admins") ],
            roleMatchers=[ util.RolesFromUsername(roles=["admins"], usernames=[ini.get("phase2", "status_user")]) ]
        )

####### PROJECT IDENTITY

# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.

c['title'] = ini.get("general", "title")
c['titleURL'] = ini.get("general", "title_url")

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.

c['buildbotURL'] = buildbot_url

####### DB URL

c['db'] = {
    # This specifies what database buildbot uses to store its state. You can leave
    # this at its default for all but the largest installations.
    'db_url' : "sqlite:///state.sqlite",
}

c['buildbotNetUsageData'] = None